diff --git a/Cargo.lock b/Cargo.lock index 1ae4054..9b64464 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,15 @@ # It is not intended for manual editing. version = 4 +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + [[package]] name = "anyhow" version = "1.0.101" @@ -169,8 +178,22 @@ version = "0.4.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" dependencies = [ + "iana-time-zone", + "js-sys", "num-traits", "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf", ] [[package]] @@ -354,6 +377,7 @@ dependencies = [ "pem", "pyo3", "pyo3-async-runtimes", + "regorus", "reqwest", "ring", "rustls-pki-types", @@ -1028,6 +1052,30 @@ dependencies = [ "windows-registry", ] +[[package]] +name = "iana-time-zone" +version = "0.1.65" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + [[package]] name = "icu_collections" version = "2.1.1" @@ -1350,6 +1398,16 @@ dependencies = [ "tempfile", ] +[[package]] +name = "num-bigint" +version = "0.4.6" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" +dependencies = [ + "num-integer", + "num-traits", +] + [[package]] name = "num-bigint-dig" version = "0.8.6" @@ -1550,6 +1608,24 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -1878,6 +1954,51 @@ dependencies = [ "bitflags", ] +[[package]] +name = "regex" +version = "1.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" +dependencies = [ + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" +dependencies = [ + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" + +[[package]] +name = "regorus" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4431401ee52bd814219a0d5475d7825e82a17aa337ab718660820914f06267fd" +dependencies = [ + "anyhow", + "chrono", + "chrono-tz", + "lazy_static", + "num-bigint", + 
"num-traits", + "regex", + "semver", + "serde", + "serde_json", + "spin", + "thiserror", +] + [[package]] name = "reqwest" version = "0.12.28" @@ -2255,6 +2376,12 @@ version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" +[[package]] +name = "siphasher" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" + [[package]] name = "slab" version = "0.4.12" @@ -2860,6 +2987,41 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" +[[package]] +name = "windows-core" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "windows-link" version = "0.2.1" diff --git a/Cargo.toml b/Cargo.toml index 8a6d5f7..a33621f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -76,6 +76,11 @@ signature = { version = "2.2", default-features = false, optional = true } dcap-qvl-webpki = { version = "=0.103.4", features = ["alloc"] } +# Rego policy engine (optional, for Intel QAL compatibility) +regorus 
= { version = "0.9", optional = true, default-features = false, features = [ + "arc", "time", "regex", "semver", +] } + [dev-dependencies] hex-literal = "1.1.0" @@ -89,7 +94,7 @@ name = "dcap_qvl" crate-type = ["cdylib", "staticlib", "rlib"] [features] -default = ["std", "report", "ring", "rustcrypto"] +default = ["std", "report", "ring", "rustcrypto", "rego"] std = [ "serde/std", "scale/std", @@ -106,13 +111,14 @@ std = [ borsh = ["dep:borsh"] borsh_schema = ["borsh", "borsh/unstable__schema"] report = ["std", "tracing", "futures", "reqwest"] -js = ["getrandom/js", "serde-wasm-bindgen", "wasm-bindgen"] -python = ["pyo3", "pyo3-async-runtimes", "tokio", "std", "report", "ring"] +js = ["getrandom/js", "serde-wasm-bindgen", "wasm-bindgen", "rustcrypto", "rego"] +python = ["pyo3", "pyo3-async-runtimes", "tokio", "std", "report", "ring", "rego"] go = ["std", "ring", "serde_json"] ring = ["dep:ring", "dcap-qvl-webpki/ring", "_anycrypto"] rustcrypto = ["dep:sha2", "dep:p256", "dep:signature", "dcap-qvl-webpki/rustcrypto", "_anycrypto"] _anycrypto = [] contract = ["getrandom"] +rego = ["dep:regorus", "std", "serde_json", "getrandom"] # Enables the dangerous_verify_with_tcb_override() function, allowing TCB checks to be overridden # with custom collateral. Normal verify() is not affected. 
diff --git a/README.md b/README.md index a7bc3d5..835bceb 100644 --- a/README.md +++ b/README.md @@ -73,8 +73,8 @@ use dcap_qvl::verify::rustcrypto::verify; ```rust use dcap_qvl::collateral::get_collateral; -// Use explicit backend for predictable behavior -use dcap_qvl::verify::ring::verify; +use dcap_qvl::verify::{QuoteVerifier, ring}; +use dcap_qvl::SimplePolicy; use dcap_qvl::PHALA_PCCS_URL; #[tokio::main] @@ -86,11 +86,40 @@ async fn main() { let collateral = get_collateral(&pccs_url, "e).await.expect("failed to get collateral"); let now = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs(); - let report = verify("e, &collateral, now).expect("failed to verify quote"); + + // Phase 1: Cryptographic verification + let verifier = QuoteVerifier::new_prod(ring::backend()); + let result = verifier.verify("e, collateral, now).expect("verification failed"); + + // Phase 2: Policy validation + let report = result.validate(&SimplePolicy::strict(now)).expect("policy failed"); println!("{:?}", report); } ``` +# Policy Validation + +After cryptographic verification, apply a **policy** to check TCB status, advisory IDs, collateral freshness, and platform flags. + +```rust +use dcap_qvl::{SimplePolicy, TcbStatus}; +use core::time::Duration; + +// Strict: only UpToDate (default) +let policy = SimplePolicy::strict(now); + +// Relaxed: accept OutOfDate + 30-day collateral grace +let policy = SimplePolicy::strict(now) + .allow_status(TcbStatus::OutOfDate) + .collateral_grace_period(Duration::from_secs(30 * 24 * 3600)) + .reject_advisory("INTEL-SA-00334") + .reject_advisories(&["INTEL-SA-00615", "INTEL-SA-00809"]); +``` + +For custom validation logic, implement the `Policy` trait directly. + +See [docs/policy.md](docs/policy.md) for the complete policy guide, including grace period semantics, platform flags, `RegoPolicy`, and custom `Policy` trait examples. 
+ # Python Bindings @@ -112,18 +141,61 @@ make test_python_versions ```python import asyncio +import time import dcap_qvl async def main(): quote_data = open("quote.bin", "rb").read() - # Get collateral and verify in one step (defaults to Phala PCCS) + # Get collateral and perform crypto verification (defaults to Phala PCCS) result = await dcap_qvl.get_collateral_and_verify(quote_data) - print(f"Status: {result.status}") + + # Validate with SimplePolicy + now = int(time.time()) + policy = dcap_qvl.SimplePolicy.strict(now) + report = result.validate(policy) + print(f"Status: {report.status}") asyncio.run(main()) ``` +You can also validate with Intel QAL-compatible Rego policies: + +```python +policy_json = r'''{ + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570" + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0 + } +}''' + +rego_policy = dcap_qvl.RegoPolicy(policy_json) +report = result.validate(rego_policy) +``` + +And from JS/WASM: + +```js +import init, { QuoteVerifier, SimplePolicy, RegoPolicy } from "@phala/dcap-qvl-web"; + +await init(); + +const collateral = await QuoteVerifier.get_collateral(pccsUrl, quoteBytes); +const verifier = new QuoteVerifier(); +const now = BigInt(Math.floor(Date.now() / 1000)); +const result = verifier.verify(quoteBytes, collateral, now); + +const simplePolicy = new SimplePolicy(now); +const report1 = result.validate(simplePolicy); + +const regoPolicy = new RegoPolicy(policyJson); +const result2 = verifier.verify(quoteBytes, collateral, now); +const report2 = result2.validate_rego(regoPolicy); +``` + See [python-bindings/](python-bindings/) for complete documentation, examples, and testing information. # License diff --git a/cli/README.md b/cli/README.md index 8b9e197..a38f2e4 100644 --- a/cli/README.md +++ b/cli/README.md @@ -9,3 +9,9 @@ git clone https://github.com/Phala-Network/dcap-qvl.git cd dcap-qvl/cli cargo run -- decode-quote --hex ../sample/tdx-quote.hex | jq . 
``` + +Strict verification: + +```sh +cargo run -- verify --strict --hex ../sample/tdx-quote.hex +``` diff --git a/cli/src/bin/test_case.rs b/cli/src/bin/test_case.rs index 31972dc..b638083 100644 --- a/cli/src/bin/test_case.rs +++ b/cli/src/bin/test_case.rs @@ -113,10 +113,12 @@ fn run_verify(quote_file: PathBuf, collateral_file: PathBuf, root_ca_file: Optio }; let ring_result = ring_verifier - .verify("e_bytes, &collateral, now) + .verify("e_bytes, collateral.clone(), now) + .map(|r| r.into_report_unchecked()) .map_err(|e| format!("{e:#}")); let rustcrypto_result = rustcrypto_verifier - .verify("e_bytes, &collateral, now) + .verify("e_bytes, collateral, now) + .map(|r| r.into_report_unchecked()) .map_err(|e| format!("{e:#}")); if ring_result != rustcrypto_result { eprintln!("Verification results differ between ring and rustcrypto"); @@ -125,20 +127,14 @@ fn run_verify(quote_file: PathBuf, collateral_file: PathBuf, root_ca_file: Optio return 1; } - let ring_result1 = ring_verifier.verify("e_bytes, &collateral, now); - match ring_result1 { - Ok(verified_report) => { + match ring_result { + Ok(report) => { println!("Verification successful"); - println!("Status: {:?}", verified_report.status); + println!("Status: {}", report.status); 0 } Err(e) => { - eprintln!("Verification failed: {}", e); - let mut source = e.source(); - while let Some(err) = source { - eprintln!(" Caused by: {}", err); - source = err.source(); - } + eprintln!("Verification failed: {e}"); 1 } } diff --git a/cli/src/main.rs b/cli/src/main.rs index 59b629e..f5e2aa3 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -9,7 +9,8 @@ use clap::{Args, Parser, Subcommand}; use dcap_qvl::collateral::{get_collateral, PHALA_PCCS_URL}; use dcap_qvl::intel; use dcap_qvl::quote::Quote; -use dcap_qvl::verify::verify; +use dcap_qvl::verify::{ring, QuoteVerifier}; +use dcap_qvl::SimplePolicy; use der::Decode; use serde::Serialize; use x509_cert::Certificate; @@ -48,6 +49,9 @@ struct VerifyQuoteArgs { /// 
Indicate the quote file is in hex format #[arg(long)] hex: bool, + /// Apply SimplePolicy::strict(now) after cryptographic verification + #[arg(long)] + strict: bool, /// The quote file quote_file: PathBuf, } @@ -103,12 +107,26 @@ async fn command_verify_quote(args: VerifyQuoteArgs) -> Result<()> { let now = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH)? .as_secs(); - let report = verify("e, &collateral, now).context("Failed to verify quote")?; + let verifier = QuoteVerifier::new_prod(ring::backend()); + let result = verifier + .verify("e, collateral, now) + .context("Failed to verify quote")?; + let report = if args.strict { + result + .validate(&SimplePolicy::strict(now)) + .context("Strict policy validation failed")? + } else { + result.into_report_unchecked() + }; println!( "{}", serde_json::to_string(&report).context("Failed to serialize report")? ); - eprintln!("Quote verified"); + if args.strict { + eprintln!("Quote verified under strict policy"); + } else { + eprintln!("Quote verified"); + } Ok(()) } diff --git a/docs/policy.md b/docs/policy.md new file mode 100644 index 0000000..dfee5aa --- /dev/null +++ b/docs/policy.md @@ -0,0 +1,197 @@ +# Policy Validation + +After cryptographic verification, `dcap-qvl` supports a **policy validation** phase that checks the platform's TCB (Trusted Computing Base) status, advisory IDs, collateral freshness, and platform configuration flags. + +## Two-Phase Verification + +``` +verify() ──► QuoteVerificationResult ──► validate(policy) ──► VerifiedReport + │ │ │ + │ ├─ supplemental() ├─ SimplePolicy (built-in) + │ │ (lazy, on demand) ├─ RegoPolicy (Intel Rego script) + │ │ └─ impl Policy (custom) + │ └─ into_report_unchecked() + │ (skip policy) + crypto only inspect data enforce rules +``` + +- **`verify()`** — performs cryptographic verification only (signature, certificate chain, CRL, QE identity). Returns `QuoteVerificationResult`. 
+- **`supplemental()`** — lazily builds `SupplementalData` with TCB status, advisory IDs, platform flags, etc. +- **`validate(policy)`** — applies a `Policy` to the supplemental data. Returns `VerifiedReport` on success. +- **`into_report_unchecked()`** — skips policy validation entirely (use when you handle validation externally). + +## SimplePolicy + +The built-in policy with 9 checks from Intel's Appraisal framework. Strict by default — only `UpToDate` status, no grace period, no advisory blacklist. + +### Basic Usage + +```rust +use dcap_qvl::verify::{QuoteVerifier, ring}; +use dcap_qvl::SimplePolicy; + +let verifier = QuoteVerifier::new_prod(ring::backend()); +let result = verifier.verify("e, collateral, now)?; + +// Strict: only UpToDate, collateral must not be expired +let report = result.validate(&SimplePolicy::strict(now))?; +``` + +### Builder Methods + +```rust +use dcap_qvl::{SimplePolicy, TcbStatus}; +use core::time::Duration; + +let policy = SimplePolicy::strict(now) + // Accept additional TCB statuses + .allow_status(TcbStatus::SWHardeningNeeded) + .allow_status(TcbStatus::ConfigurationNeeded) + // Reject specific advisory IDs (case-insensitive) + .reject_advisory("INTEL-SA-00334") + .reject_advisory("INTEL-SA-00615") + .reject_advisories(&["INTEL-SA-00809", "INTEL-SA-00820"]) + // Collateral freshness: accept expired collateral within grace window + .collateral_grace_period(Duration::from_secs(30 * 24 * 3600)) // 30 days + // Minimum TCB evaluation data number + .min_tcb_eval_data_number(17) + // Platform flags (default: reject True) + .allow_dynamic_platform(true) + .allow_cached_keys(true) + .allow_smt(true) + // SGX type whitelist (default: skip check) + .accepted_sgx_types(&[0, 1]); // Standard + Scalable +``` + +### The 9 Checks + +| # | Check | Default | Builder | +|---|-------|---------|---------| +| 1 | **TCB status whitelist** | Only `UpToDate` | `.allow_status(...)` | +| 2 | **Advisory ID blacklist** | Empty set (allow all) | 
`.reject_advisory(...)` | +| 3 | **Collateral expiration** | `earliest_expiration >= now` | `.collateral_grace_period(Duration)` | +| 4 | **Platform TCB freshness** | Only for OutOfDate statuses | `.platform_grace_period(Duration)` | +| 4b | **QE TCB freshness** | Only for QE `OutOfDate` | `.qe_grace_period(Duration)` | +| 5 | **Min TCB eval data number** | Skip | `.min_tcb_eval_data_number(n)` | +| 6 | **Dynamic platform flag** | Reject `True` | `.allow_dynamic_platform(true)` | +| 7 | **Cached keys flag** | Reject `True` | `.allow_cached_keys(true)` | +| 8 | **SMT flag** | Reject `True` | `.allow_smt(true)` | +| 9 | **SGX type whitelist** | Skip | `.accepted_sgx_types(&[0, 1, 2])` | + +### Grace Period Behavior + +**Collateral grace** (`collateral_grace_period`): Extends the collateral expiration window. If `earliest_expiration + grace >= now`, the quote is accepted. + +**Platform grace** (`platform_grace_period`): Applies only to the **platform** TCB level. For `OutOfDate` / `OutOfDateConfigurationNeeded`, checks `platform.tcb_date_tag + grace >= now`. For pure `OutOfDate`, only the **platform** advisories are skipped during the grace window. For `OutOfDateConfigurationNeeded`, platform advisories are still checked. + +**QE grace** (`qe_grace_period`): Applies only to the **QE** TCB level. For QE `OutOfDate`, checks `qe.tcb_level.tcb_date + grace >= now`. QE advisories are skipped only while this QE grace is active. + +`collateral_grace_period` is **mutually exclusive** with the TCB grace windows — setting it together with `platform_grace_period` or `qe_grace_period` causes a validation error. 
+ +### Platform Flags (Three-State) + +Platform flags (`dynamic_platform`, `cached_keys`, `smt_enabled`) use `PckCertFlag` with three values: + +| Value | Meaning | Default behavior | +|-------|---------|-----------------| +| `True` | Flag is set | **Rejected** | +| `False` | Flag is explicitly unset | Accepted | +| `Undefined` | Not present (Processor CA certs) | Accepted | + +Only `True` is rejected by default. `False` and `Undefined` always pass. + +## Custom Policy + +For logic that `SimplePolicy` cannot express, implement the `Policy` trait directly: + +```rust +use dcap_qvl::{Policy, SupplementalData, TcbStatus}; +use anyhow::{bail, Result}; + +struct MyPolicy { + now: u64, + grace_secs: u64, +} + +impl Policy for MyPolicy { + fn validate(&self, data: &SupplementalData) -> Result<()> { + let in_grace = data.platform.tcb_date_tag + .saturating_add(self.grace_secs) >= self.now; + + // Conditional logic based on grace window + if !in_grace && data.tcb.status != TcbStatus::UpToDate { + bail!("Only UpToDate accepted outside grace period"); + } + + // Check specific advisories even during grace + for id in &data.tcb.advisory_ids { + if id == "INTEL-SA-00220" { + bail!("Critical advisory {id} always rejected"); + } + } + + Ok(()) + } +} +``` + +## RegoPolicy (feature: `rego`) + +Runs Intel's official `qal_script.rego` via the `regorus` Rego interpreter. Accepts a JSON policy string matching Intel's format: + +```rust +use dcap_qvl::RegoPolicy; + +let policy_json = r#"{ + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570" + }, + "reference": { + "accepted_tcb_status": ["UpToDate", "SWHardeningNeeded"], + "collateral_grace_period": 7776000 + } +}"#; +let policy = RegoPolicy::new(policy_json)?; +let report = result.validate(&policy)?; +``` + +`RegoPolicySet` supports multiple JSON policies for multi-measurement appraisal (one per `class_id`), matching Intel QAL's full functionality. 
Both `RegoPolicy` and `RegoPolicySet` implement the `Policy` trait, so they work with the standard `validate()` method. + +### Python + +```python +import dcap_qvl + +policy_json = r'''{ + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570" + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0 + } +}''' + +policy = dcap_qvl.RegoPolicy(policy_json) +report = result.validate(policy) +``` + +### JS / WASM + +```js +import init, { QuoteVerifier, RegoPolicy, RegoPolicySet } from "@phala/dcap-qvl-web"; + +await init(); + +const verifier = new QuoteVerifier(); +const result = verifier.verify(quoteBytes, collateral, now); + +const policy = new RegoPolicy(policyJson); +const report = result.validate_rego(policy); + +const policySet = new RegoPolicySet([platformPolicyJson, tenantPolicyJson]); +const result2 = verifier.verify(quoteBytes, collateral, now); +const report2 = result2.validate_rego_set(policySet); +``` + +See [Intel's DCAP Appraisal documentation](https://github.com/intel/SGXDataCenterAttestationPrimitives) for the Rego policy JSON format. diff --git a/python-bindings/python/dcap_qvl/__init__.py b/python-bindings/python/dcap_qvl/__init__.py index edd11d9..293c051 100644 --- a/python-bindings/python/dcap_qvl/__init__.py +++ b/python-bindings/python/dcap_qvl/__init__.py @@ -4,12 +4,20 @@ This package provides Python bindings for the DCAP (Data Center Attestation Primitives) quote verification library implemented in Rust. +Two-phase verification API (matches Rust): +1. verify(quote, collateral, now_secs) -> QuoteVerificationResult (crypto only) +2. 
result.validate(policy) -> VerifiedReport (policy checks) + Main classes: - QuoteCollateralV3: Represents quote collateral data -- VerifiedReport: Contains verification results +- QuoteVerificationResult: Intermediate result from crypto verification +- VerifiedReport: Contains verification results after policy validation +- SimplePolicy: Verification policy with builder pattern +- RegoPolicy: Intel QAL-compatible Rego policy +- RegoPolicySet: Intel QAL-compatible multi-policy set Main functions: -- verify: Verify a quote with collateral data +- verify: Verify a quote with collateral data (returns QuoteVerificationResult) - get_collateral: Get collateral from PCCS URL - get_collateral_from_pcs: Get collateral from Intel PCS - get_collateral_and_verify: Get collateral and verify quote @@ -22,11 +30,15 @@ from ._dcap_qvl import ( PyQuoteCollateralV3 as QuoteCollateralV3, PyVerifiedReport as VerifiedReport, + PyQuoteVerificationResult as QuoteVerificationResult, PyQuoteHeader as QuoteHeader, PyTdReport10 as TdReport10, PyTdReport15 as TdReport15, PySgxEnclaveReport as SgxEnclaveReport, PyPckExtension as PckExtension, + PySimplePolicy as SimplePolicy, + PyRegoPolicy as RegoPolicy, + PyRegoPolicySet as RegoPolicySet, PyQuote as Quote, py_verify as verify, py_verify_with_root_ca as verify_with_root_ca, @@ -92,16 +104,20 @@ async def get_collateral_from_pcs(raw_quote: bytes) -> QuoteCollateralV3: async def get_collateral_and_verify( - raw_quote: bytes, pccs_url: Optional[str] = None -) -> VerifiedReport: - """Get collateral and verify the quote. + raw_quote: bytes, + pccs_url: Optional[str] = None, +) -> QuoteVerificationResult: + """Get collateral and verify the quote (crypto only). + + Returns a QuoteVerificationResult that must be validated with a policy + via .validate(policy) to get a VerifiedReport. 
Args: raw_quote: Raw quote bytes pccs_url: Optional PCCS URL (defaults to Phala PCCS) Returns: - VerifiedReport: Verification result + QuoteVerificationResult: Use .validate(policy) to get VerifiedReport Raises: ValueError: If quote is invalid or verification fails @@ -112,21 +128,23 @@ async def get_collateral_and_verify( # Get collateral collateral = await get_collateral(url, raw_quote) - # Get current time + # Verify quote (crypto only) now_secs = int(time.time()) - - # Verify quote return verify(raw_quote, collateral, now_secs) __all__ = [ "QuoteCollateralV3", + "QuoteVerificationResult", "VerifiedReport", "QuoteHeader", "TdReport10", "TdReport15", "SgxEnclaveReport", "PckExtension", + "SimplePolicy", + "RegoPolicy", + "RegoPolicySet", "AttestationKeyType", "TeeType", "Quote", diff --git a/python-bindings/python/dcap_qvl/_dcap_qvl.pyi b/python-bindings/python/dcap_qvl/_dcap_qvl.pyi index 2171d7e..83717e3 100644 --- a/python-bindings/python/dcap_qvl/_dcap_qvl.pyi +++ b/python-bindings/python/dcap_qvl/_dcap_qvl.pyi @@ -313,6 +313,138 @@ class PyPckExtension: ... +class PySimplePolicy: + """Verification policy with builder pattern. + + Use ``SimplePolicy.strict(now_secs)`` to create a strict policy (only UpToDate), + then chain builder methods to relax constraints. + + Example:: + + policy = SimplePolicy.strict(now_secs) \\ + .allow_status("SWHardeningNeeded") \\ + .reject_advisory("INTEL-SA-00334") \\ + .collateral_grace_period(90 * 24 * 3600) \\ + .qe_grace_period(7 * 24 * 3600) + """ + + @staticmethod + def strict(now_secs: int) -> "PySimplePolicy": + """Create a strict policy: only UpToDate, no grace, no advisory blacklist.""" + ... + + def allow_status(self, status: str) -> "PySimplePolicy": + """Allow an additional TCB status (e.g. "SWHardeningNeeded").""" + ... + + def reject_advisory(self, advisory_id: str) -> "PySimplePolicy": + """Reject a specific advisory ID (e.g. "INTEL-SA-00334").""" + ... 
+ + def reject_advisories(self, advisory_ids: List[str]) -> "PySimplePolicy": + """Reject multiple advisory IDs at once.""" + ... + + def collateral_grace_period(self, secs: int) -> "PySimplePolicy": + """Set collateral grace period in seconds.""" + ... + + def platform_grace_period(self, secs: int) -> "PySimplePolicy": + """Set platform grace period in seconds.""" + ... + + def qe_grace_period(self, secs: int) -> "PySimplePolicy": + """Set QE grace period in seconds.""" + ... + + def min_tcb_eval_data_number(self, min: int) -> "PySimplePolicy": + """Set minimum TCB evaluation data number.""" + ... + + def allow_dynamic_platform(self, allow: bool) -> "PySimplePolicy": + """Set whether dynamic platforms are allowed.""" + ... + + def allow_cached_keys(self, allow: bool) -> "PySimplePolicy": + """Set whether cached keys are allowed.""" + ... + + def allow_smt(self, allow: bool) -> "PySimplePolicy": + """Set whether SMT (hyperthreading) is allowed.""" + ... + + def accepted_sgx_types(self, types: List[int]) -> "PySimplePolicy": + """Set accepted SGX types (e.g. [0, 1, 2]).""" + ... + + +class PyRegoPolicy: + """Intel QAL-compatible Rego policy.""" + + def __init__(self, policy_json: str) -> None: + """Create a Rego policy from Intel-format JSON.""" + ... + + @staticmethod + def with_rego(policy_json: str, rego_source: str) -> "PyRegoPolicy": + """Create a Rego policy with a custom Rego script.""" + ... + + +class PyRegoPolicySet: + """Intel QAL-compatible multi-measurement Rego policy set.""" + + def __init__(self, policy_jsons: List[str]) -> None: + """Create a Rego policy set from multiple Intel-format JSON policies.""" + ... + + @staticmethod + def with_rego( + policy_jsons: List[str], rego_source: str + ) -> "PyRegoPolicySet": + """Create a Rego policy set with a custom Rego script.""" + ... + + +class PyQuoteVerificationResult: + """Intermediate result from crypto verification (phase 1). 
+ + Use ``validate(policy)`` to apply a policy and get a ``PyVerifiedReport``. + Use ``into_report_unchecked()`` to skip policy validation (dangerous). + + The result is consumed on validate/into_report_unchecked — calling twice raises ValueError. + """ + + def validate( + self, policy: Union[PySimplePolicy, PyRegoPolicy, PyRegoPolicySet] + ) -> PyVerifiedReport: + """Validate against a policy, returning a VerifiedReport. Consumes the result. + + Args: + policy: Verification policy + + Returns: + PyVerifiedReport containing verification status + + Raises: + ValueError: If result already consumed or policy validation fails + """ + ... + + def into_report_unchecked(self) -> PyVerifiedReport: + """Get VerifiedReport without policy validation. Consumes the result. + + WARNING: Skips all policy checks. Use only when you handle validation externally. + + Returns: + PyVerifiedReport containing verification status + + Raises: + ValueError: If result already consumed + """ + ... + + class PyQuote: """ Represents a parsed SGX or TDX quote. @@ -420,13 +552,13 @@ class PyQuote: def py_verify( raw_quote: bytes, collateral: PyQuoteCollateralV3, now_secs: int -) -> PyVerifiedReport: +) -> PyQuoteVerificationResult: """ - Verify an SGX or TDX quote with the provided collateral data. + Verify an SGX or TDX quote (crypto only, phase 1). - This function performs cryptographic verification of the quote against - the provided collateral information, checking certificates, signatures, - and revocation status. + Performs cryptographic verification of the quote against the provided + collateral. Returns a QuoteVerificationResult that must be validated + with a policy via .validate(policy) to get a VerifiedReport. 
Args: raw_quote: Raw quote data as bytes (SGX or TDX format) @@ -434,11 +566,10 @@ def py_verify( now_secs: Current timestamp in seconds since Unix epoch for time-based checks Returns: - PyVerifiedReport containing verification status and advisory information + PyQuoteVerificationResult: use .validate(policy) to get VerifiedReport Raises: - ValueError: If verification fails due to invalid data, expired certificates, - revoked keys, or other verification errors + ValueError: If cryptographic verification fails """ ... @@ -446,10 +577,10 @@ def py_verify_with_root_ca( raw_quote: bytes, collateral: PyQuoteCollateralV3, root_ca_der: bytes, - now_secs: int -) -> PyVerifiedReport: + now_secs: int, +) -> PyQuoteVerificationResult: """ - Verify an SGX or TDX quote with the provided collateral data and custom root CA. + Verify an SGX or TDX quote with custom root CA (crypto only, phase 1). Args: raw_quote: Raw quote data as bytes (SGX or TDX format) @@ -458,10 +589,10 @@ def py_verify_with_root_ca( now_secs: Current timestamp in seconds since Unix epoch for time-based checks Returns: - PyVerifiedReport containing verification status and advisory information + PyQuoteVerificationResult: use .validate(policy) to get VerifiedReport Raises: - ValueError: If verification fails + ValueError: If cryptographic verification fails """ ... 
diff --git a/python-bindings/test_case.py b/python-bindings/test_case.py index a2ed20b..563d63c 100755 --- a/python-bindings/test_case.py +++ b/python-bindings/test_case.py @@ -83,9 +83,10 @@ def cmd_verify(args): # Use production Intel root CA result = dcap_qvl.verify(quote_bytes, collateral_obj, now_secs) - # Verification successful + # Verification successful — get report without policy (test harness) + report = result.into_report_unchecked() print("Verification successful") - print(f"Status: {result.status}") + print(f"Status: {report.status}") return 0 except Exception as e: diff --git a/python-bindings/tests/test_python_bindings.py b/python-bindings/tests/test_python_bindings.py index 07f0973..3087c2b 100644 --- a/python-bindings/tests/test_python_bindings.py +++ b/python-bindings/tests/test_python_bindings.py @@ -83,6 +83,58 @@ def test_verify_with_invalid_quote(self): dcap_qvl.verify(invalid_quote, collateral, 1234567890) +class TestRegoPolicies: + """Test Rego policy bindings.""" + + def test_rego_policy_constructor(self): + """Test creating a RegoPolicy from valid JSON.""" + policy_json = json.dumps( + { + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570", + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + }, + } + ) + + policy = dcap_qvl.RegoPolicy(policy_json) + assert isinstance(policy, dcap_qvl.RegoPolicy) + + def test_rego_policy_set_constructor(self): + """Test creating a RegoPolicySet from valid JSON policies.""" + policy_json = json.dumps( + { + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570", + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + }, + } + ) + + policies = dcap_qvl.RegoPolicySet([policy_json]) + assert isinstance(policies, dcap_qvl.RegoPolicySet) + + def test_rego_policy_missing_class_id(self): + """Test that missing class_id is rejected.""" + policy_json = json.dumps( + { + "reference": { + 
"accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + }, + } + ) + + with pytest.raises(ValueError): + dcap_qvl.RegoPolicy(policy_json) + + @pytest.mark.skipif( os.getenv("DCAP_QVL_RUN_SAMPLE_VERIFY") != "1", reason="Sample verify is an integration test. Set DCAP_QVL_RUN_SAMPLE_VERIFY=1 to run.", @@ -107,10 +159,80 @@ def test_verify_with_sample_data(self): collateral = dcap_qvl.QuoteCollateralV3.from_json(json.dumps(collateral_json)) - # Note: We use a timestamp that might make the test pass - # In a real scenario, you'd use the current time or a known good time - result = dcap_qvl.verify(quote_data, collateral, 1234567890) + # Phase 1: crypto verification + qvr = dcap_qvl.verify(quote_data, collateral, 1234567890) + assert isinstance(qvr, dcap_qvl.QuoteVerificationResult) + + # Phase 2: policy validation + policy = dcap_qvl.SimplePolicy.strict(1234567890) + result = qvr.validate(policy) assert isinstance(result, dcap_qvl.VerifiedReport) assert isinstance(result.status, str) assert isinstance(result.advisory_ids, list) + + def test_validate_with_rego_policy(self): + """Test validation with RegoPolicy using sample SGX quote.""" + if not Path("sample/sgx_quote").exists() or not Path( + "sample/sgx_quote_collateral.json" + ).exists(): + pytest.skip("Sample files not available") + + with open("sample/sgx_quote", "rb") as f: + quote_data = f.read() + + with open("sample/sgx_quote_collateral.json", "r") as f: + collateral_json = json.load(f) + + collateral = dcap_qvl.QuoteCollateralV3.from_json(json.dumps(collateral_json)) + qvr = dcap_qvl.verify(quote_data, collateral, 1234567890) + + policy_json = json.dumps( + { + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570", + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + }, + } + ) + policy = dcap_qvl.RegoPolicy(policy_json) + result = qvr.validate(policy) + + assert isinstance(result, dcap_qvl.VerifiedReport) + assert isinstance(result.status, 
str) + + def test_validate_with_rego_policy_set(self): + """Test validation with RegoPolicySet using sample SGX quote.""" + if not Path("sample/sgx_quote").exists() or not Path( + "sample/sgx_quote_collateral.json" + ).exists(): + pytest.skip("Sample files not available") + + with open("sample/sgx_quote", "rb") as f: + quote_data = f.read() + + with open("sample/sgx_quote_collateral.json", "r") as f: + collateral_json = json.load(f) + + collateral = dcap_qvl.QuoteCollateralV3.from_json(json.dumps(collateral_json)) + qvr = dcap_qvl.verify(quote_data, collateral, 1234567890) + + policy_json = json.dumps( + { + "environment": { + "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570", + }, + "reference": { + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + }, + } + ) + policy = dcap_qvl.RegoPolicySet([policy_json]) + result = qvr.validate(policy) + + assert isinstance(result, dcap_qvl.VerifiedReport) + assert isinstance(result.status, str) diff --git a/rego/qal_script.rego b/rego/qal_script.rego new file mode 100644 index 0000000..adff4c7 --- /dev/null +++ b/rego/qal_script.rego @@ -0,0 +1,1038 @@ +package dcap.quote.appraisal + +import future.keywords.contains +import future.keywords.every +import future.keywords.if +import future.keywords.in + +# +# Constant value of each class id +# +sgx_id := "3123ec35-8d38-4ea5-87a5-d6c48b567570" + +enclave_id := "bef7cb8c-31aa-42c1-854c-10db005d5c41" + +tdx10_id := "9eec018b-7481-4b1c-8e1a-9f7c0c8c777f" + +tdx15_id := "f708b97f-0fb2-4e6b-8b03-8a5bcd1221d3" + +tdqe_id := "3769258c-75e6-4bc7-8d72-d2b0e224cad2" + +guest_td10_id := "a1e4ee9c-a12e-48ac-bed0-e3f89297f687" + +guest_td15_id := "45b734fc-aa4e-4c3d-ad28-e43d08880e68" + +# +# UINT64_MAX for checking time.parse_rfc3339_ns return value +# +uint64_max := 18446744073709551615 + +# +# Utility rule to get matched report and policy based on class_id +# +collect_bundle[id] := bundle if { + some report in input.qvl_result + some policy in 
input.policies.policy_array + is_string(report.environment.class_id) + is_string(policy.environment.class_id) + lower(report.environment.class_id) == lower(policy.environment.class_id) + id := report.environment.class_id + bundle := {"report": report, "policy": policy} +} + +report_in_policy contains id if { + some report in input.qvl_result + some policy in input.policies.policy_array + is_string(report.environment.class_id) + is_string(policy.environment.class_id) + lower(report.environment.class_id) == lower(policy.environment.class_id) + id := report.environment.class_id +} + +# +# Utility rule to get report which doesn't has corresponding policy +# +report_not_in_policy contains report if { + some report in input.qvl_result + is_string(report.environment.class_id) + id := lower(report.environment.class_id) + not report_in_policy[id] +} + +# +# Utility rule to get quote hash +# +quote_hash contains hash if { + some qh in input.qvl_result + is_string(qh.quote_hash) + is_string(qh.algo) + hash := qh +} + +# +# Utility rule to get optional user data +# +optional_ud contains user_data if { + some ud in input.qvl_result + is_string(ud.user_data) + user_data := ud.user_data +} + +# +# Section 1: Format the final appraisal output +# +final_appraisal_result contains output if { + count(quote_hash) != 0 + count(optional_ud) != 0 + some user_data + user_data_str := optional_ud[user_data] + output := { + "overall_appraisal_result": final_ret, + "appraisal_check_date": time.now_ns(), + "nonce": rand.intn("appraisal", 1000000000000000), + "quote_hash": quote_hash, + "user_data": user_data_str, + "appraised_reports": appraisal_result, + "certification_data": certification_data, + } +} + +final_appraisal_result contains output if { + count(quote_hash) == 0 + count(optional_ud) == 0 + output := { + "overall_appraisal_result": final_ret, + "appraisal_check_date": time.now_ns(), + "nonce": rand.intn("appraisal", 1000000000000000), + "appraised_reports": appraisal_result, + 
"certification_data": certification_data, + } +} + +final_appraisal_result contains output if { + count(quote_hash) != 0 + count(optional_ud) == 0 + output := { + "overall_appraisal_result": final_ret, + "appraisal_check_date": time.now_ns(), + "nonce": rand.intn("appraisal", 1000000000000000), + "quote_hash": quote_hash, + "appraised_reports": appraisal_result, + "certification_data": certification_data, + } +} + +final_appraisal_result contains output if { + count(quote_hash) == 0 + count(optional_ud) != 0 + some user_data + user_data_str := optional_ud[user_data] + output := { + "overall_appraisal_result": final_ret, + "appraisal_check_date": time.now_ns(), + "nonce": rand.intn("appraisal", 1000000000000000), + "user_data": user_data_str, + "appraised_reports": appraisal_result, + "certification_data": certification_data, + } +} + +# Get final appraisal return value +default final_ret := 0 + +final_ret := 1 if { + count(appraisal_result) > 0 + every output in appraisal_result { + output.appraisal_result == 1 + } +} else := 0 if { + count(appraisal_result) > 0 + some output in appraisal_result + output.appraisal_result == 0 +} else := -1 if { + count(appraisal_result) > 0 + every output in appraisal_result { + output.appraisal_result != 0 + } + some ret in appraisal_result + ret.appraisal_result == -1 +} + +# +# Section 2: Try to get appraisal result for each report and corresponding policy +# +# appraise report for TDX 1.5 platform +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == tdx15_id + appraise_ret := platform_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": platform_sub_ret(item), + } +} + +# +# appraise report for TDX 1.0 platform +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + 
item.report.environment.class_id == tdx10_id + appraise_ret := platform_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": platform_sub_ret(item), + } +} + +# +# appraise report for TD QE +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == tdqe_id + appraise_ret := td_qe_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": td_qe_sub_ret(item), + } +} + +# +# appraise report for guest TD 1.5 +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == guest_td15_id + appraise_ret := td_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": td_sub_ret(item), + } +} + +# +# appraise report for guest TD 1.0 +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == guest_td10_id + appraise_ret := td_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": td_sub_ret(item), + } +} + +# +# appraise report for SGX platform +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == sgx_id + appraise_ret := platform_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": 
item.report.measurement}, + "policy": item.policy, + "detailed_result": platform_sub_ret(item), + } +} + +# +# appraise report for SGX enclave +# +appraisal_result contains appraisal_output if { + some item in collect_bundle + item.report.environment.class_id == enclave_id + appraise_ret := enclave_appraisal_ret(item) + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + "policy": item.policy, + "detailed_result": enclave_sub_ret(item), + } +} + +# +# appraise report for those report which doesn't have policy +# +appraisal_result contains appraisal_output if { + some item in report_not_in_policy + appraise_ret := -1 + + appraisal_output := { + "appraisal_result": appraise_ret, + "report": {"environment": item.report.environment, "measurement": item.report.measurement}, + } +} + +# +# Extract certification data from QVL report +# Suppose QVL report should always has one certification data +# +certification_data contains cert_data if { + some item in collect_bundle + + cert_data := item.report.certification_data +} + +# +# Section 3: Platform TCB appraisal +# +# Check to see if any in a set of status are not in the policy +default unaccepted_tcb_status_present(_) := false + +# tcb status check fail in below cases +# a. accepted_tcb_status is a single string, and tcb_status is not same between policy and report +# b. 
accepted_tcb_status is an array of string, and one of tcb_status in report is not in accepted_tcb_status +unaccepted_tcb_status_present(bundle) if { + some status in bundle.report.measurement.tcb_status + is_string(status) + is_string(bundle.policy.reference.accepted_tcb_status) + upper(status) != upper(bundle.policy.reference.accepted_tcb_status) +} + +unaccepted_tcb_status_present(bundle) if { + # support user to input string or 'array of string' + is_array(bundle.policy.reference.accepted_tcb_status) + upper_accepted_tcb := [val | + some status in bundle.policy.reference.accepted_tcb_status + val := upper(status) + ] + some status in bundle.report.measurement.tcb_status + not upper(status) in upper_accepted_tcb +} + +default tcb_status_present(_) := false + +tcb_status_present(bundle) if { + # only accept array of string in QVL output + is_array(bundle.report.measurement.tcb_status) +} + +default tcb_uptodate_check(_) := false + +tcb_uptodate_check(bundle) if { + is_array(bundle.policy.reference.accepted_tcb_status) + + # tcb status must have UpToDate + basic_status := "UPTODATE" + upper_accepted_tcb := [val | + some status in bundle.policy.reference.accepted_tcb_status + val := upper(status) + ] + basic_status in upper_accepted_tcb +} + +tcb_uptodate_check(bundle) if { + is_string(bundle.policy.reference.accepted_tcb_status) + + # tcb status must have UpToDate + basic_status := "UPTODATE" + basic_status == upper(bundle.policy.reference.accepted_tcb_status) +} + +# Appraise required tcb_status - this must not contain any strings that are not in the policy +default tcb_status_ok(_) := false + +# tcb status is OK if none of the individual status are rejected and the input contains a tcb_status +tcb_status_ok(bundle) if { + tcb_status_present(bundle) + tcb_uptodate_check(bundle) + not unaccepted_tcb_status_present(bundle) +} + +# Appraise required platform_tcb expiration_date_check +# if policy.reference.collateral_grace_period is provided, +# then the platform TCB 
earliest_expiration_date must be within the grace period +default expiration_date_check_ok(_) := false + +expiration_date_check_ok(bundle) if { + not bundle.policy.reference.collateral_grace_period +} + +# If user defines collateral_grace_period +# min_eval_num must not be present +expiration_date_check_ok(bundle) if { + is_number(bundle.policy.reference.collateral_grace_period) + not bundle.policy.reference.min_eval_num + + # Convert grace period from seconds to ns + grace_period := bundle.policy.reference.collateral_grace_period * 1000000000 + expiration_date := time.parse_rfc3339_ns(bundle.report.measurement.earliest_expiration_date) + expiration_date != uint64_max + expiration_date + grace_period >= time.now_ns() +} + +# Appraise platform_tcb.tcb_level_date_tag +# if policies.reference.platform_grace_period is provided, then +# the platform_tcb.tcb_level_date_tag must be within the grace period +default earliest_accepted_tcb_level_date_tag_ok(_) := false + +tcb_level_date_tag_basic_check(bundle) if { + is_number(bundle.policy.reference.platform_grace_period) + is_number(bundle.policy.reference.collateral_grace_period) + bundle.policy.reference.collateral_grace_period == 0 + not bundle.policy.reference.min_eval_num + basic_status := ["UPTODATE", "OUTOFDATE"] + is_array(bundle.policy.reference.accepted_tcb_status) + upper_accepted_tcb := [val | + some status in bundle.policy.reference.accepted_tcb_status + val := upper(status) + ] + every status in basic_status { + status in upper_accepted_tcb + } +} + +earliest_accepted_tcb_level_date_tag_ok(bundle) if { + not bundle.policy.reference.platform_grace_period +} + +# If current TCB status in report is one of "UpToDate", "ConfigurationNeeded", "SWHardeningNeeded" or "TDRelaunchAdvised" +# and collateral has no expiry, then ignore the check +earliest_accepted_tcb_level_date_tag_ok(bundle) if { + tcb_level_date_tag_basic_check(bundle) + expiration_date_check_ok(bundle) + ignored_status := ["UPTODATE", 
"CONFIGURATIONNEEDED", "SWHARDENINGNEEDED", "TDRELAUNCHADVISED"] + every status in bundle.report.measurement.tcb_status { + upper(status) in ignored_status + } +} + +# If user defines platform_grace_period, then collateral_grace_period must be 0 +# accepted_tcb_status must include UpToDate and OutOfDate +# min_eval_num must not be present +earliest_accepted_tcb_level_date_tag_ok(bundle) if { + tcb_level_date_tag_basic_check(bundle) + grace_period := bundle.policy.reference.platform_grace_period * 1000000000 + expiration_date := time.parse_rfc3339_ns(bundle.report.measurement.tcb_level_date_tag) + expiration_date != uint64_max + expiration_date + grace_period >= time.now_ns() +} + +# Appriasal platform_tcb tcb_level_date_tag +# if policies.reference.min_tcb_level_date is provided, then +# the platform_tcb.tcb_level_date_tag must not be before the policy +# min_tcb_level_date +default accepted_tcb_level_date_tag_ok(_) := false + +accepted_tcb_level_date_tag_ok(bundle) if { + not bundle.policy.sgx_platform.reference.min_tcb_level_date +} + +accepted_tcb_level_date_tag_ok(bundle) if { + is_string(bundle.policy.reference.min_tcb_level_date) + min_tcb_date := time.parse_rfc3339_ns(bundle.policy.reference.min_tcb_level_date) + min_tcb_date != uint64_max + tcb_level_date := time.parse_rfc3339_ns(bundle.report.measurement.tcb_level_date_tag) + tcb_level_date != uint64_max + tcb_level_date >= min_tcb_date +} + +# Appraise optional platform_tcb tcb_eval_num +default tcb_eval_num_ok(_) := false + +tcb_eval_num_ok(bundle) if { + not bundle.policy.reference.min_eval_num +} + +# If user defines min_eval_num, then platform_grace_period must not be present +# collateral_grace_period also must not be present +# accepted_tcb_status must include UpToDate +tcb_eval_num_ok(bundle) if { + is_number(bundle.report.measurement.tcb_eval_num) + is_number(bundle.policy.reference.min_eval_num) + not bundle.policy.reference.platform_grace_period + not 
bundle.policy.reference.collateral_grace_period + bundle.report.measurement.tcb_eval_num >= bundle.policy.reference.min_eval_num +} + +# Appraise optional platform_tcb platform_provider_id +default platform_provider_id_ok(_) := false + +platform_provider_id_ok(bundle) if { + not bundle.policy.reference.accepted_platform_provider_ids +} + +platform_provider_id_ok(bundle) if { + some provider_id in bundle.policy.reference.accepted_platform_provider_ids + is_string(provider_id) + is_string(bundle.report.measurement.platform_provider_id) + lower(provider_id) == lower(bundle.report.measurement.platform_provider_id) +} + +# Appraise sgx_type - all required_sgx_type in policy should not be missing +# sgx_type has switched from string to integer (0, 1, 2) +# Suppose sgx_type in QVL output should be one of 0, 1, 2 +default sgx_types_ok(_) := false + +sgx_types_ok(bundle) if { + not bundle.policy.reference.accepted_sgx_types +} + +sgx_types_ok(bundle) if { + is_array(bundle.policy.reference.accepted_sgx_types) + is_number(bundle.report.measurement.sgx_type) + bundle.report.measurement.sgx_type in bundle.policy.reference.accepted_sgx_types +} + +sgx_types_ok(bundle) if { + is_number(bundle.policy.reference.accepted_sgx_types) + is_number(bundle.report.measurement.sgx_type) + bundle.report.measurement.sgx_type == bundle.policy.reference.accepted_sgx_types +} + +# Appraise dynamic_platform, only fail in below situation +# policy 'allow_dynamic_platform = false' AND report 'dynamic_platform = true' +default dynamic_platform_ok(_) := false + +dynamic_platform_ok(bundle) if { + not dynamic_platform_fail(bundle) +} + +default dynamic_platform_fail(_) := false + +dynamic_platform_fail(bundle) if { + bundle.report.measurement.is_dynamic_platform + bundle.policy.reference.allow_dynamic_platform == false +} + +# Appraise cached_keys, only fail in below situation +# policy 'allow_cached_keys = false' AND report 'cached_keys = true' +default cached_keys_ok(_) := false + 
+cached_keys_ok(bundle) if { + not cached_keys_fail(bundle) +} + +default cached_keys_fail(_) := false + +cached_keys_fail(bundle) if { + bundle.report.measurement.cached_keys + bundle.policy.reference.allow_cached_keys == false +} + +# Appraise smt_enabled, only fail in below situation +# policy 'allow_smt_enabled = false' AND report 'smt_enabled = true' +default smt_enabled_ok(_) := false + +smt_enabled_ok(bundle) if { + not smt_enabled_fail(bundle) +} + +default smt_enabled_fail(_) := false + +smt_enabled_fail(bundle) if { + bundle.report.measurement.smt_enabled + bundle.policy.reference.allow_smt_enabled == false +} + +# Appraise optional platform_tcb advisory_ids +default advisory_ids_ok(_) := false + +advisory_ids_ok(bundle) if { + not advisory_ids_rejected(bundle) +} + +advisory_ids_rejected(bundle) if { + some report_id in bundle.report.measurement.advisory_ids + some policy_id in bundle.policy.reference.rejected_advisory_ids + upper(report_id) == upper(policy_id) +} + +default platform_tcb_policy_present(_) := false + +platform_tcb_policy_present(bundle) if { + bundle.policy + lower(bundle.policy.environment.class_id) == lower(bundle.report.environment.class_id) +} + +# Sum up platform TCB appraisal +default platform_appraisal_ret(_) := 0 + +platform_appraisal_ret(bundle) := -1 if { + not platform_tcb_policy_present(bundle) +} else := 1 if { + tcb_status_ok(bundle) + expiration_date_check_ok(bundle) + earliest_accepted_tcb_level_date_tag_ok(bundle) + accepted_tcb_level_date_tag_ok(bundle) + tcb_eval_num_ok(bundle) + platform_provider_id_ok(bundle) + dynamic_platform_ok(bundle) + cached_keys_ok(bundle) + smt_enabled_ok(bundle) + advisory_ids_ok(bundle) + sgx_types_ok(bundle) +} else := 0 + +# Try to output return value for each platform sub function +platform_sub_ret(bundle) := {{ + "tcb_status_check": tcb_status_ok(bundle), + "expiration_date_check": expiration_date_check_ok(bundle), + "earliest_accepted_tcb_level_date_tag_check": 
earliest_accepted_tcb_level_date_tag_ok(bundle), + "accepted_tcb_level_date_tag_check": accepted_tcb_level_date_tag_ok(bundle), + "tcb_eval_num_check": tcb_eval_num_ok(bundle), + "platform_provider_id_check": platform_provider_id_ok(bundle), + "dynamic_platform_check": dynamic_platform_ok(bundle), + "cached_keys_check": cached_keys_ok(bundle), + "smt_enabled_check": smt_enabled_ok(bundle), + "advisory_ids_check": advisory_ids_ok(bundle), + "sgx_types_check": sgx_types_ok(bundle), +}} + +# +# Section 4: TD QE appraisal, reuse part of functions in platform appraisal +# +default td_qe_policy_present(_) := false + +td_qe_policy_present(bundle) if { + bundle.policy + lower(bundle.policy.environment.class_id) == lower(bundle.report.environment.class_id) +} + +# Sum up platform TCB appraisal +default td_qe_appraisal_ret(_) := 0 + +td_qe_appraisal_ret(bundle) := -1 if { + not td_qe_policy_present(bundle) +} else := 1 if { + tcb_status_ok(bundle) + expiration_date_check_ok(bundle) + earliest_accepted_tcb_level_date_tag_ok(bundle) + accepted_tcb_level_date_tag_ok(bundle) + tcb_eval_num_ok(bundle) +} else := 0 + +# Try to output return value for each platform sub function +td_qe_sub_ret(bundle) := {{ + "td_qe_tcb_status_check": tcb_status_ok(bundle), + "td_qe_expiration_date_check": expiration_date_check_ok(bundle), + "td_qe_earliest_accepted_tcb_level_date_tag_check": earliest_accepted_tcb_level_date_tag_ok(bundle), + "td_qe_accepted_tcb_level_date_tag_check": accepted_tcb_level_date_tag_ok(bundle), + "td_qe_tcb_eval_num_check": tcb_eval_num_ok(bundle), +}} + +# +# Section 5: application enclave appraisal +# +default application_enclave_tcb_policy_present(_) := false + +application_enclave_tcb_policy_present(bundle) if { + bundle.policy + lower(bundle.policy.environment.class_id) == lower(bundle.report.environment.class_id) +} + +# Appraise optional enclave_identity miscselect +default miscselect_ok(_) := false + +miscselect_ok(bundle) if { + not 
bundle.policy.reference.sgx_miscselect +} + +miscselect_ok(bundle) if { + hex2int := { + "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, + "8": 8, "9": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, + } + value := split(upper(bundle.report.measurement.sgx_miscselect), "") + policy := split(upper(bundle.policy.reference.sgx_miscselect), "") + mask := split(upper(bundle.policy.reference.sgx_miscselect_mask), "") + equal_num := count({i | + mask[i] + bits.and(hex2int[value[i]], hex2int[mask[i]]) == bits.and(hex2int[policy[i]], hex2int[mask[i]]) + }) + orig_num := count(mask) + equal_num == orig_num +} + +# Appraise required enclave_identity attributes +default attributes_ok(_) := false + +attributes_ok(bundle) if { + hex2int := { + "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, + "8": 8, "9": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, + } + value := split(upper(bundle.report.measurement.sgx_attributes), "") + policy := split(upper(bundle.policy.reference.sgx_attributes), "") + mask := split(upper(bundle.policy.reference.sgx_attributes_mask), "") + equal_num := count({i | + mask[i] + bits.and(hex2int[value[i]], hex2int[mask[i]]) == bits.and(hex2int[policy[i]], hex2int[mask[i]]) + }) + orig_num := count(mask) + equal_num == orig_num +} + +# Appraise optional enclave_identity ce_attributes +default ce_attributes_ok(_) := false + +ce_attributes_ok(bundle) if { + not bundle.policy.reference.sgx_ce_attributes +} + +ce_attributes_ok(bundle) if { + hex2int := { + "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, + "8": 8, "9": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, + } + value := split(upper(bundle.report.measurement.sgx_ce_attributes), "") + policy := split(upper(bundle.policy.reference.sgx_ce_attributes), "") + mask := split(upper(bundle.policy.reference.sgx_ce_attributes_mask), "") + equal_num := count({i | + mask[i] + bits.and(hex2int[value[i]], hex2int[mask[i]]) == 
bits.and(hex2int[policy[i]], hex2int[mask[i]]) + }) + orig_num := count(mask) + equal_num == orig_num +} + +default mrenclave_ok(_) := false + +mrenclave_ok(bundle) if { + not bundle.policy.reference.sgx_mrenclave +} + +mrenclave_ok(bundle) if { + lower(bundle.report.measurement.sgx_mrenclave) == lower(bundle.policy.reference.sgx_mrenclave) +} + +default mrsigner_ok(_) := false + +mrsigner_ok(bundle) if { + not bundle.policy.reference.sgx_mrsigner +} + +mrsigner_ok(bundle) if { + lower(bundle.report.measurement.sgx_mrsigner) == lower(bundle.policy.reference.sgx_mrsigner) +} + +default isvprod_id_ok(_) := false + +isvprod_id_ok(bundle) if { + not bundle.policy.reference.sgx_isvprodid +} + +isvprod_id_ok(bundle) if { + is_number(bundle.report.measurement.sgx_isvprodid) + is_number(bundle.policy.reference.sgx_isvprodid) + bundle.report.measurement.sgx_isvprodid == bundle.policy.reference.sgx_isvprodid +} + +default isvsvn_ok(_) := false + +isvsvn_ok(bundle) if { + not bundle.policy.reference.sgx_isvsvn_min +} + +isvsvn_ok(bundle) if { + is_number(bundle.report.measurement.sgx_isvsvn) + is_number(bundle.policy.reference.sgx_isvsvn_min) + bundle.report.measurement.sgx_isvsvn >= bundle.policy.reference.sgx_isvsvn_min +} + +# Appraise optional kss fields: configid, configsvn_min, isvextprodid, isvfamilyid +default configid_ok(_) := false + +configid_ok(bundle) if { + not bundle.policy.reference.sgx_configid +} + +configid_ok(bundle) if { + is_string(bundle.report.measurement.sgx_configid) + is_string(bundle.policy.reference.sgx_configid) + lower(bundle.report.measurement.sgx_configid) == lower(bundle.policy.reference.sgx_configid) +} + +default configsvn_ok(_) := false + +configsvn_ok(bundle) if { + not bundle.policy.reference.sgx_configsvn_min +} + +configsvn_ok(bundle) if { + is_number(bundle.report.measurement.sgx_configsvn) + is_number(bundle.policy.reference.sgx_configsvn_min) + bundle.report.measurement.sgx_configsvn >= bundle.policy.reference.sgx_configsvn_min +} + 
+default isvextprodid_ok(_) := false + +isvextprodid_ok(bundle) if { + not bundle.policy.reference.sgx_isvextprodid +} + +isvextprodid_ok(bundle) if { + is_string(bundle.report.measurement.sgx_isvextprodid) + is_string(bundle.policy.reference.sgx_isvextprodid) + lower(bundle.report.measurement.sgx_isvextprodid) == lower(bundle.policy.reference.sgx_isvextprodid) +} + +default isvfamilyid_ok(_) := false + +isvfamilyid_ok(bundle) if { + not bundle.policy.reference.sgx_isvfamilyid +} + +isvfamilyid_ok(bundle) if { + is_string(bundle.report.measurement.sgx_isvfamilyid) + is_string(bundle.policy.reference.sgx_isvfamilyid) + lower(bundle.report.measurement.sgx_isvfamilyid) == bundle.policy.reference.sgx_isvfamilyid +} + +# Sum up enclave appraisal +default enclave_appraisal_ret(_) := 0 + +enclave_appraisal_ret(bundle) := -1 if { + not application_enclave_tcb_policy_present(bundle) +} else := 1 if { + miscselect_ok(bundle) + attributes_ok(bundle) + ce_attributes_ok(bundle) + mrenclave_ok(bundle) + mrsigner_ok(bundle) + isvprod_id_ok(bundle) + isvsvn_ok(bundle) + configid_ok(bundle) + configsvn_ok(bundle) + isvextprodid_ok(bundle) + isvfamilyid_ok(bundle) +} else := 0 + +# Try to output return value for each enclave sub function +enclave_sub_ret(bundle) := {{ + "sgx_miscselcect_check": miscselect_ok(bundle), + "sgx_attributes_check": attributes_ok(bundle), + "sgx_ce_attributes_check": ce_attributes_ok(bundle), + "sgx_mrenclave_check": mrenclave_ok(bundle), + "sgx_mrsigner_check": mrsigner_ok(bundle), + "sgx_isvprod_id_check": isvprod_id_ok(bundle), + "sgx_isvsvn_check": isvsvn_ok(bundle), + "sgx_configid_check": configid_ok(bundle), + "sgx_configsvn_check": configsvn_ok(bundle), + "sgx_isvextprodid_check": isvextprodid_ok(bundle), + "sgx_isvfamilyid_check": isvfamilyid_ok(bundle), +}} + +# +# Section 6: TD appraisal +# +default td_tcb_policy_present(_) := false + +td_tcb_policy_present(bundle) if { + bundle.policy + lower(bundle.policy.environment.class_id) == 
lower(bundle.report.environment.class_id) +} + +# Appraise required guest td attributes +default td_attributes_ok(_) := false + +td_attributes_ok(bundle) if { + not bundle.policy.reference.tdx_attributes + not bundle.policy.reference.tdx_attributes_mask +} + +td_attributes_ok(bundle) if { + hex2int := { + "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, + "8": 8, "9": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, + } + value := split(upper(bundle.report.measurement.tdx_attributes), "") + policy := split(upper(bundle.policy.reference.tdx_attributes), "") + mask := split(upper(bundle.policy.reference.tdx_attributes_mask), "") + equal_num := count({i | + mask[i] + bits.and(hex2int[value[i]], hex2int[mask[i]]) == bits.and(hex2int[policy[i]], hex2int[mask[i]]) + }) + orig_num := count(mask) + equal_num == orig_num +} + +# Appraise optional guest td xfam +default td_xfam_ok(_) := false + +td_xfam_ok(bundle) if { + not bundle.policy.reference.tdx_xfam + not bundle.policy.reference.tdx_xfam_mask +} + +td_xfam_ok(bundle) if { + hex2int := { + "0": 0, "1": 1, "2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, + "8": 8, "9": 9, "A": 10, "B": 11, "C": 12, "D": 13, "E": 14, "F": 15, + } + value := split(upper(bundle.report.measurement.tdx_xfam), "") + policy := split(upper(bundle.policy.reference.tdx_xfam), "") + mask := split(upper(bundle.policy.reference.tdx_xfam_mask), "") + equal_num := count({i | + mask[i] + bits.and(hex2int[value[i]], hex2int[mask[i]]) == bits.and(hex2int[policy[i]], hex2int[mask[i]]) + }) + orig_num := count(mask) + equal_num == orig_num +} + +# Appraise guest td tdx_mrconfigid, tdx_mrowner, tdx_mrownerconfig and tdx_mrtd +default td_mrconfigid_ok(_) := false + +td_mrconfigid_ok(bundle) if { + not bundle.policy.reference.tdx_mrconfigid +} + +td_mrconfigid_ok(bundle) if { + is_string(bundle.report.measurement.tdx_mrconfigid) + is_string(bundle.policy.reference.tdx_mrconfigid) + lower(bundle.report.measurement.tdx_mrconfigid) == 
lower(bundle.policy.reference.tdx_mrconfigid) +} + +default td_mrowner_ok(_) := false + +td_mrowner_ok(bundle) if { + not bundle.policy.reference.tdx_mrowner +} + +td_mrowner_ok(bundle) if { + is_string(bundle.report.measurement.tdx_mrowner) + is_string(bundle.policy.reference.tdx_mrowner) + lower(bundle.report.measurement.tdx_mrowner) == lower(bundle.policy.reference.tdx_mrowner) +} + +default td_mrownerconfig_ok(_) := false + +td_mrownerconfig_ok(bundle) if { + not bundle.policy.reference.tdx_mrownerconfig +} + +td_mrownerconfig_ok(bundle) if { + is_string(bundle.report.measurement.tdx_mrownerconfig) + is_string(bundle.policy.reference.tdx_mrownerconfig) + lower(bundle.report.measurement.tdx_mrownerconfig) == lower(bundle.policy.reference.tdx_mrownerconfig) +} + +default td_mrtd_ok(_) := false + +td_mrtd_ok(bundle) if { + not bundle.policy.reference.tdx_mrtd +} + +td_mrtd_ok(bundle) if { + is_string(bundle.report.measurement.tdx_mrtd) + is_string(bundle.policy.reference.tdx_mrtd) + lower(bundle.report.measurement.tdx_mrtd) == lower(bundle.policy.reference.tdx_mrtd) +} + +# Appraise optional rtmr 0~4 +default td_rtmr0_ok(_) := false + +td_rtmr0_ok(bundle) if { + not bundle.policy.reference.tdx_rtmr0 +} + +td_rtmr0_ok(bundle) if { + is_string(bundle.report.measurement.tdx_rtmr0) + is_string(bundle.policy.reference.tdx_rtmr0) + lower(bundle.report.measurement.tdx_rtmr0) == lower(bundle.policy.reference.tdx_rtmr0) +} + +default td_rtmr1_ok(_) := false + +td_rtmr1_ok(bundle) if { + not bundle.policy.reference.tdx_rtmr1 +} + +td_rtmr1_ok(bundle) if { + is_string(bundle.report.measurement.tdx_rtmr1) + is_string(bundle.policy.reference.tdx_rtmr1) + lower(bundle.report.measurement.tdx_rtmr1) == lower(bundle.policy.reference.tdx_rtmr1) +} + +default td_rtmr2_ok(_) := false + +td_rtmr2_ok(bundle) if { + not bundle.policy.reference.tdx_rtmr2 +} + +td_rtmr2_ok(bundle) if { + is_string(bundle.report.measurement.tdx_rtmr2) + is_string(bundle.policy.reference.tdx_rtmr2) + 
lower(bundle.report.measurement.tdx_rtmr2) == lower(bundle.policy.reference.tdx_rtmr2) +} + +default td_rtmr3_ok(_) := false + +td_rtmr3_ok(bundle) if { + not bundle.policy.reference.tdx_rtmr3 +} + +td_rtmr3_ok(bundle) if { + is_string(bundle.report.measurement.tdx_rtmr3) + is_string(bundle.policy.reference.tdx_rtmr3) + lower(bundle.report.measurement.tdx_rtmr3) == lower(bundle.policy.reference.tdx_rtmr3) +} + +# Appraise optional tdx mrservicetd, only available for TDX 1.5 +default td_mrservicetd_ok(_) := false + +td_mrservicetd_ok(bundle) if { + not bundle.policy.reference.tdx_mrservicetd +} + +td_mrservicetd_ok(bundle) if { + is_string(bundle.report.measurement.tdx_mrservicetd) + is_string(bundle.policy.reference.tdx_mrservicetd) + lower(bundle.report.measurement.tdx_mrservicetd) == lower(bundle.policy.reference.tdx_mrservicetd) +} + +# Sum up TD appraisal +default td_appraisal_ret(_) := 0 + +td_appraisal_ret(bundle) := -1 if { + not td_tcb_policy_present(bundle) +} else := 1 if { + td_attributes_ok(bundle) + td_xfam_ok(bundle) + td_mrconfigid_ok(bundle) + td_mrowner_ok(bundle) + td_mrownerconfig_ok(bundle) + td_mrtd_ok(bundle) + td_rtmr0_ok(bundle) + td_rtmr1_ok(bundle) + td_rtmr2_ok(bundle) + td_rtmr3_ok(bundle) + td_mrservicetd_ok(bundle) +} else := 0 + +# Try to output return value for each platform sub function +td_sub_ret(bundle) := {{ + "td_attributes_check": td_attributes_ok(bundle), + "td_xfam_check": td_xfam_ok(bundle), + "td_mrconfigid_check": td_mrconfigid_ok(bundle), + "td_mrowner_check": td_mrowner_ok(bundle), + "td_mrownerconfig_check": td_mrownerconfig_ok(bundle), + "td_mrtd_check": td_mrtd_ok(bundle), + "td_rtmr0_check": td_rtmr0_ok(bundle), + "td_rtmr1_check": td_rtmr1_ok(bundle), + "td_rtmr2_check": td_rtmr2_ok(bundle), + "td_rtmr3_check": td_rtmr3_ok(bundle), + "td_mrservicetd_check": td_mrservicetd_ok(bundle), +}} diff --git a/src/collateral.rs b/src/collateral.rs index ecb1684..fb443d1 100644 --- a/src/collateral.rs +++ b/src/collateral.rs 
@@ -423,7 +423,7 @@ pub async fn get_collateral_from_pcs(quote: &[u8]) -> Result get_collateral(INTEL_PCS_URL, quote).await } -/// Get collateral and verify the quote (uses ring backend). +/// Get collateral and verify the quote, returning [`QuoteVerificationResult`](crate::verify::QuoteVerificationResult). /// /// # Arguments /// @@ -433,7 +433,8 @@ pub async fn get_collateral_from_pcs(quote: &[u8]) -> Result pub async fn get_collateral_and_verify( quote: &[u8], pccs_url: Option<&str>, -) -> Result { +) -> Result { + use crate::verify::QuoteVerifier; use std::time::SystemTime; let pccs_url = pccs_url @@ -445,7 +446,7 @@ pub async fn get_collateral_and_verify( .duration_since(SystemTime::UNIX_EPOCH) .context("Failed to get current time")? .as_secs(); - crate::verify::verify(quote, &collateral, now) + QuoteVerifier::new_prod_default_crypto().verify(quote, collateral, now) } #[cfg(test)] diff --git a/src/constants.rs b/src/constants.rs index 63b068b..441c008 100644 --- a/src/constants.rs +++ b/src/constants.rs @@ -1,9 +1,14 @@ #![allow(dead_code)] +/// MR_SIGNER measurement (32 bytes) pub type MrSigner = [u8; 32]; +/// MR_ENCLAVE measurement (32 bytes) pub type MrEnclave = [u8; 32]; +/// FMSPC - Firmware Security Version & Package Configuration (6 bytes) pub type Fmspc = [u8; 6]; +/// CPU SVN - Security Version Number for CPU microcode (16 bytes) pub type CpuSvn = [u8; 16]; +/// SVN - Security Version Number (16-bit) pub type Svn = u16; pub const ATTESTATION_KEY_TYPE_ECDSA256_WITH_P256_CURVE: u16 = 2; diff --git a/src/ffi.rs b/src/ffi.rs index e9d8087..e9e608e 100644 --- a/src/ffi.rs +++ b/src/ffi.rs @@ -8,6 +8,7 @@ use serde::Serialize; use crate::intel; use crate::quote::{EnclaveReport, Header, Quote, Report, TDReport10, TDReport15}; +use crate::tcb_info::TcbStatusWithAdvisory; use crate::verify::{self, VerifiedReport}; use crate::QuoteCollateralV3; @@ -196,19 +197,25 @@ struct FfiVerifiedReport { report: FfiReport, #[serde(with = "serde_bytes")] ppid: Vec, - 
qe_status: crate::tcb_info::TcbStatusWithAdvisory, - platform_status: crate::tcb_info::TcbStatusWithAdvisory, + qe_status: TcbStatusWithAdvisory, + platform_status: TcbStatusWithAdvisory, } impl FfiVerifiedReport { fn from(vr: VerifiedReport) -> Self { + let qe_status = + TcbStatusWithAdvisory::new(vr.qe_tcb_level.tcb_status, vr.qe_tcb_level.advisory_ids); + let platform_status = TcbStatusWithAdvisory::new( + vr.platform_tcb_level.tcb_status, + vr.platform_tcb_level.advisory_ids, + ); Self { status: vr.status, advisory_ids: vr.advisory_ids, report: FfiReport::from_report(&vr.report), ppid: vr.ppid, - qe_status: vr.qe_status, - platform_status: vr.platform_status, + qe_status, + platform_status, } } } @@ -295,11 +302,12 @@ pub unsafe extern "C" fn dcap_parse_quote_cb( let quote_type = if parsed.header.is_sgx() { "SGX" } else { "TDX" }; let cert_chain_pem = parsed.raw_cert_chain().ok().map(|raw| { - let mut end = raw.len(); - while end > 0 && raw[end.saturating_sub(1)] == 0 { - end = end.saturating_sub(1); - } - String::from_utf8_lossy(&raw[..end]).into_owned() + let trimmed = raw + .iter() + .rposition(|byte| *byte != 0) + .and_then(|end| raw.get(..=end)) + .unwrap_or(&[]); + String::from_utf8_lossy(trimmed).into_owned() }); // For cert_type 5: extract fmspc/ca from embedded cert chain @@ -379,8 +387,9 @@ pub unsafe extern "C" fn dcap_verify_cb( } }; - let report = match verify::verify(quote_slice, &collateral, now_secs) { - Ok(r) => r, + let verifier = verify::QuoteVerifier::new_prod(verify::default_crypto::backend()); + let report = match verifier.verify(quote_slice, collateral, now_secs) { + Ok(qvr) => qvr.into_report_unchecked(), Err(e) => return emit_error(format_error(&e), cb, user_data), }; @@ -421,9 +430,8 @@ pub unsafe extern "C" fn dcap_verify_with_root_ca_cb( }; let verifier = verify::QuoteVerifier::new(root_ca.to_vec(), verify::default_crypto::backend()); - - let report = match verifier.verify(quote_slice, &collateral, now_secs) { - Ok(r) => r, + let 
report = match verifier.verify(quote_slice, collateral, now_secs) { + Ok(qvr) => qvr.into_report_unchecked(), Err(e) => return emit_error(format_error(&e), cb, user_data), }; @@ -541,9 +549,7 @@ pub unsafe extern "C" fn dcap_parse_pck_extension_from_pem_cb( pce_id: ext.pce_id.to_vec(), fmspc: ext.fmspc.to_vec(), sgx_type: ext.sgx_type, - platform_instance_id: ext - .platform_instance_id - .map(|v| serde_bytes::ByteBuf::from(v)), + platform_instance_id: ext.platform_instance_id.map(serde_bytes::ByteBuf::from), raw_extension: ext.raw_extension, }; diff --git a/src/intel.rs b/src/intel.rs index b9593cf..62fb0a4 100644 --- a/src/intel.rs +++ b/src/intel.rs @@ -11,7 +11,7 @@ use crate::{ utils, }; -/// Parsed values from the Intel SGX extension. +/// Parsed values from the Intel SGX extension in a PCK certificate. #[derive(Debug, Clone, PartialEq, Eq)] pub struct PckExtension { pub ppid: Vec, @@ -22,6 +22,15 @@ pub struct PckExtension { pub sgx_type: u64, pub platform_instance_id: Option>, pub raw_extension: Vec, + /// Whether the platform can be extended with additional packages + /// (Platform CA certs only; `None` for Processor CA certs) + pub dynamic_platform: Option, + /// Whether platform root keys are cached by SGX Registration Backend + /// (Platform CA certs only; `None` for Processor CA certs) + pub cached_keys: Option, + /// Whether SMT (simultaneous multithreading / hyperthreading) is enabled + /// (Platform CA certs only; `None` for Processor CA certs) + pub smt_enabled: Option, } impl PckExtension { @@ -74,6 +83,20 @@ pub fn parse_pck_extension(cert_der: &[u8]) -> Result { let sgx_type = decode_enumerated(&find_extension_required(&[oids::SGX_TYPE], &extension)?)?; let platform_instance_id = find_extension_optional(&[oids::PLATFORM_INSTANCE_ID], &extension)?; + // Configuration flags (only present in Platform CA certs, under OID 1.2.840.113741.1.13.1.7) + let dynamic_platform = + find_extension_optional(&[oids::CONFIGURATION, oids::DYNAMIC_PLATFORM], 
&extension)? + .map(|v| decode_boolean(&v)) + .transpose()?; + let cached_keys = + find_extension_optional(&[oids::CONFIGURATION, oids::CACHED_KEYS], &extension)? + .map(|v| decode_boolean(&v)) + .transpose()?; + let smt_enabled = + find_extension_optional(&[oids::CONFIGURATION, oids::SMT_ENABLED], &extension)? + .map(|v| decode_boolean(&v)) + .transpose()?; + Ok(PckExtension { ppid, cpu_svn, @@ -83,6 +106,9 @@ pub fn parse_pck_extension(cert_der: &[u8]) -> Result { sgx_type, platform_instance_id, raw_extension: extension, + dynamic_platform, + cached_keys, + smt_enabled, }) } @@ -176,6 +202,14 @@ fn find_recursive<'a>( Ok(None) } +fn decode_boolean(bytes: &[u8]) -> Result { + match bytes[..] { + [0x00] => Ok(false), + [_] => Ok(true), + _ => bail!("Unexpected BOOLEAN length: {}", bytes.len()), + } +} + fn decode_enumerated(bytes: &[u8]) -> Result { match bytes[..] { [byte0] => Ok(u64::from(byte0)), diff --git a/src/lib.rs b/src/lib.rs index cd13a36..c2be080 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -18,7 +18,8 @@ //! //! ```no_run //! use dcap_qvl::collateral::get_collateral; -//! use dcap_qvl::verify::verify; +//! use dcap_qvl::verify::{QuoteVerifier, ring}; +//! use dcap_qvl::SimplePolicy; //! use dcap_qvl::PHALA_PCCS_URL; //! //! #[tokio::main] @@ -30,7 +31,9 @@ //! let collateral = get_collateral(&pccs_url, "e).await.expect("failed to get collateral"); //! //! let now = std::time::SystemTime::now().duration_since(std::time::UNIX_EPOCH).unwrap().as_secs(); -//! let report = verify("e, &collateral, now).expect("failed to verify quote"); +//! let verifier = QuoteVerifier::new_prod(ring::backend()); +//! let result = verifier.verify("e, collateral, now).expect("verification failed"); +//! let report = result.validate(&SimplePolicy::strict(now)).expect("policy validation failed"); //! println!("{:?}", report); //! } //! 
``` @@ -48,7 +51,9 @@ use borsh::BorshSchema; #[cfg(feature = "borsh")] use borsh::{BorshDeserialize, BorshSerialize}; -#[derive(Encode, Decode, Clone, PartialEq, Eq, Debug, Serialize, Deserialize)] +#[derive( + Encode, Decode, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize, Deserialize, +)] #[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] #[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] pub struct QuoteCollateralV3 { @@ -83,10 +88,26 @@ pub mod oids; mod constants; pub mod intel; -mod qe_identity; +pub mod qe_identity; pub mod tcb_info; mod utils; +// Common type aliases +pub use constants::{CpuSvn, Fmspc, MrEnclave, MrSigner, Svn}; + +// Re-export commonly used types +pub use policy::{ + PckCertFlag, PckIdentity, PlatformInfo, Policy, QeInfo, SimplePolicy, SimplePolicyConfig, + SupplementalData, TcbVerdict, +}; +pub use qe_identity::{QeIdentity, QeTcb, QeTcbLevel}; +pub use tcb_info::{Tcb, TcbComponents, TcbInfo, TcbLevel, TcbStatus, TcbStatusWithAdvisory}; +pub use verify::QuoteVerificationResult; + +#[cfg(feature = "rego")] +pub use policy::{RegoPolicy, RegoPolicySet}; + +pub mod policy; pub mod quote; pub mod verify; diff --git a/src/oids.rs b/src/oids.rs index cc56959..a7c0051 100644 --- a/src/oids.rs +++ b/src/oids.rs @@ -11,5 +11,13 @@ pub const PCEID: ObjectIdentifier = oid("1.2.840.113741.1.13.1.3"); pub const FMSPC: ObjectIdentifier = oid("1.2.840.113741.1.13.1.4"); pub const SGX_TYPE: ObjectIdentifier = oid("1.2.840.113741.1.13.1.5"); pub const PLATFORM_INSTANCE_ID: ObjectIdentifier = oid("1.2.840.113741.1.13.1.6"); +/// Configuration sequence (Platform CA certs only) +pub const CONFIGURATION: ObjectIdentifier = oid("1.2.840.113741.1.13.1.7"); +/// Whether platform can be extended with additional packages +pub const DYNAMIC_PLATFORM: ObjectIdentifier = oid("1.2.840.113741.1.13.1.7.1"); +/// Whether platform root keys are cached by SGX Registration Backend +pub const CACHED_KEYS: ObjectIdentifier = 
oid("1.2.840.113741.1.13.1.7.2"); +/// Whether platform has SMT (simultaneous multithreading) enabled +pub const SMT_ENABLED: ObjectIdentifier = oid("1.2.840.113741.1.13.1.7.3"); pub const PCESVN: ObjectIdentifier = oid("1.2.840.113741.1.13.1.2.17"); pub const CPUSVN: ObjectIdentifier = oid("1.2.840.113741.1.13.1.2.18"); diff --git a/src/policy/mod.rs b/src/policy/mod.rs new file mode 100644 index 0000000..0a4b1dd --- /dev/null +++ b/src/policy/mod.rs @@ -0,0 +1,190 @@ +use anyhow::Result; +use serde::{Deserialize, Serialize}; + +use { + crate::constants::*, + crate::qe_identity::QeTcbLevel, + crate::quote::{EnclaveReport, Report}, + crate::tcb_info::{TcbLevel, TcbStatus}, + alloc::string::String, + alloc::vec::Vec, +}; + +#[cfg(feature = "borsh_schema")] +use borsh::BorshSchema; +#[cfg(feature = "borsh")] +use borsh::{BorshDeserialize, BorshSerialize}; + +mod simple; +pub use simple::{SimplePolicy, SimplePolicyConfig}; + +#[cfg(feature = "rego")] +pub(crate) mod rego; +#[cfg(feature = "rego")] +pub use rego::RegoPolicy; +#[cfg(feature = "rego")] +pub use rego::RegoPolicySet; + +/// Policy trait for customizing quote verification behavior. +/// +/// Implement this trait to define custom validation logic for [`SupplementalData`]. +/// The library provides [`SimplePolicy`] as a comprehensive built-in implementation +/// that covers all common checks from Intel's Appraisal framework. +/// +/// For most use cases, [`SimplePolicy`] with its builder methods is sufficient: +/// ```no_run +/// use dcap_qvl::SimplePolicy; +/// use dcap_qvl::TcbStatus; +/// use core::time::Duration; +/// +/// let now_unix_secs = 1_700_000_000u64; +/// +/// let policy = SimplePolicy::strict(now_unix_secs) +/// .allow_status(TcbStatus::SWHardeningNeeded) +/// .collateral_grace_period(Duration::from_secs(90 * 24 * 3600)) +/// .reject_advisory("INTEL-SA-00334"); +/// ``` +/// +/// Implement this trait directly only for logic that [`SimplePolicy`] cannot express. 
+pub trait Policy { + /// Validate supplemental data against this policy. + /// + /// Return `Ok(())` to accept, or `Err(...)` to reject. + fn validate(&self, data: &SupplementalData) -> Result<()>; +} + +/// PCK certificate flag, matching Intel's `pck_cert_flag_enum_t`. +/// +/// These flags are only present in PCK certificates issued by the **Platform CA**. +/// For Processor CA certificates, the value is [`Undefined`](PckCertFlag::Undefined). +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] +#[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] +#[cfg_attr(feature = "borsh", borsh(use_discriminant = true))] +pub enum PckCertFlag { + /// The flag is explicitly false (ASN.1 BOOLEAN FALSE). + False = 0, + /// The flag is explicitly true (ASN.1 BOOLEAN TRUE). + True = 1, + /// The flag is not present in the certificate (Processor CA certs). + Undefined = 2, +} + +impl From> for PckCertFlag { + fn from(v: Option) -> Self { + match v { + Some(true) => PckCertFlag::True, + Some(false) => PckCertFlag::False, + None => PckCertFlag::Undefined, + } + } +} + +/// Supplemental data from quote verification. +/// +/// Organized into structured sub-groups: +/// - [`tcb`](Self::tcb): Merged TCB verdict +/// - [`platform`](Self::platform): Platform-level details from PCK certificate and TCB matching +/// - [`qe`](Self::qe): QE (Quoting Enclave) verification results +/// +/// Also includes the collateral time window (8 sources: TCBInfo, QEIdentity, 2 CRLs, +/// 4 certificate chains) and the quote report body. +pub struct SupplementalData { + /// TEE type: `0x00000000` for SGX, `0x00000081` for TDX. + pub tee_type: u32, + /// Merged TCB verdict (worst of platform + QE). + pub tcb: TcbVerdict, + /// Platform verification details. + pub platform: PlatformInfo, + /// QE verification details. 
+ pub qe: QeInfo, + /// `min(issueDate / thisUpdate / notBefore)` across all 8 collateral sources. + pub earliest_issue_date: u64, + /// `max(issueDate / thisUpdate / notBefore)` across all 8 collateral sources. + pub latest_issue_date: u64, + /// `min(nextUpdate / notAfter)` across all 8 collateral sources (the "weakest link"). + pub earliest_expiration_date: u64, + /// `min(issueDate / notBefore)` across QE Identity sources (issuer chain + JSON). + pub qe_iden_earliest_issue_date: u64, + /// `max(issueDate / notBefore)` across QE Identity sources (issuer chain + JSON). + pub qe_iden_latest_issue_date: u64, + /// `min(nextUpdate / notAfter)` across QE Identity sources (issuer chain + JSON). + pub qe_iden_earliest_expiration_date: u64, + /// Quote report body (SGX enclave report, TDX TD10/TD15). + pub report: Report, +} + +/// Merged TCB verdict from platform and QE status convergence. +/// +/// Uses Intel's `convergeTcbStatusWithQeTcbStatus` logic to produce the +/// worst-case status and union of advisory IDs. +pub struct TcbVerdict { + /// Merged TCB status (worst of platform TCB + QE TCB). + pub status: TcbStatus, + /// Merged advisory IDs (union of platform + QE advisories). + pub advisory_ids: Vec, + /// Lower of TCBInfo and QEIdentity `tcbEvaluationDataNumber` values. + pub eval_data_number: u32, +} + +/// Platform-level verification results. +pub struct PlatformInfo { + /// The matched platform TCB level (unmerged). + pub tcb_level: TcbLevel, + /// Platform TCB level date as unix timestamp (precomputed from `tcb_level.tcb_date`). + pub tcb_date_tag: u64, + /// PCK certificate identity fields. + pub pck: PckIdentity, + /// SHA-384 of root CA's raw public key bytes, matching Intel's `root_key_id`. + pub root_key_id: [u8; 48], + /// CRL number from PCK Certificate Revocation List. + pub pck_crl_num: u32, + /// CRL number from Root CA Certificate Revocation List. + pub root_ca_crl_num: u32, +} + +/// QE (Quoting Enclave) verification results. 
+pub struct QeInfo { + /// The matched QE TCB level (unmerged). + pub tcb_level: QeTcbLevel, + /// The QE's enclave report. + pub report: EnclaveReport, + /// TCB evaluation data number from QE Identity (unmerged). + pub tcb_eval_data_number: u32, +} + +/// PCK certificate identity fields. +pub struct PckIdentity { + /// Platform Provisioning ID (PPID). + pub ppid: Vec, + /// CPU Security Version Number (16 bytes). + pub cpu_svn: CpuSvn, + /// PCE ISV Security Version Number. + pub pce_svn: Svn, + /// PCE ID (raw value from the PCK certificate SGX extension). + pub pce_id: Vec, + /// FMSPC (6 bytes). + pub fmspc: Fmspc, + /// SGX type: 0=Standard, 1=Scalable, 2=ScalableWithIntegrity. + pub sgx_type: u8, + /// Platform Instance ID (16 bytes, Platform CA only). + pub platform_instance_id: Option<[u8; 16]>, + /// Dynamic platform flag. + pub dynamic_platform: PckCertFlag, + /// Cached keys flag. + pub cached_keys: PckCertFlag, + /// SMT (hyperthreading) flag. + pub smt_enabled: PckCertFlag, + /// Platform Provider ID (Platform CA only, for Rego). + /// + /// Note: Intel's upstream DCAP Rego policy checks this field, but the + /// upstream QvE measurement producer currently leaves it unpopulated + /// (`//obj_plat_tcb.AddMember("platform_provider_id", , allocator);`). + /// + /// Upstream references: + /// - Rego check: + /// + /// - QvE TODO: + /// + pub platform_provider_id: Option, +} diff --git a/src/policy/rego.rs b/src/policy/rego.rs new file mode 100644 index 0000000..88451ef --- /dev/null +++ b/src/policy/rego.rs @@ -0,0 +1,1100 @@ +use super::*; +use serde_json::json; + +use crate::utils::parse_rfc3339_unix_secs; +use anyhow::{bail, Result}; + +/// Convert a unix timestamp (seconds) to an RFC3339 string. +/// Returns an empty string for timestamp 0 (matching Intel's behavior of omitting the field). 
+fn unix_to_rfc3339(secs: u64) -> String { + chrono::DateTime::from_timestamp(secs as i64, 0) + .map(|dt| dt.to_rfc3339_opts(chrono::SecondsFormat::Secs, true)) + .unwrap_or_default() +} + +/// Convert `TcbStatus` to the JSON string array that Intel's Rego expects. +/// +/// This matches Intel's `qv_result_tcb_status_map` in qve.cpp. +fn tcb_status_to_rego_array(status: TcbStatus) -> serde_json::Value { + match status { + TcbStatus::UpToDate => json!(["UpToDate"]), + TcbStatus::SWHardeningNeeded => json!(["UpToDate", "SWHardeningNeeded"]), + TcbStatus::ConfigurationNeeded => json!(["UpToDate", "ConfigurationNeeded"]), + TcbStatus::ConfigurationAndSWHardeningNeeded => { + json!(["UpToDate", "SWHardeningNeeded", "ConfigurationNeeded"]) + } + TcbStatus::OutOfDate => json!(["OutOfDate"]), + TcbStatus::OutOfDateConfigurationNeeded => { + json!(["OutOfDate", "ConfigurationNeeded"]) + } + TcbStatus::Revoked => json!(["Revoked"]), + } +} + +/// Build common platform fields into a Rego measurement JSON map. 
+fn insert_platform_fields( + m: &mut serde_json::Map, + data: &SupplementalData, +) { + // Time fields as RFC3339 strings (from SupplementalData's collateral time window) + let earliest_issue = unix_to_rfc3339(data.earliest_issue_date); + if !earliest_issue.is_empty() { + m.insert("earliest_issue_date".into(), json!(earliest_issue)); + } + let latest_issue = unix_to_rfc3339(data.latest_issue_date); + if !latest_issue.is_empty() { + m.insert("latest_issue_date".into(), json!(latest_issue)); + } + let earliest_exp = unix_to_rfc3339(data.earliest_expiration_date); + if !earliest_exp.is_empty() { + m.insert("earliest_expiration_date".into(), json!(earliest_exp)); + } + let tcb_date = unix_to_rfc3339(data.platform.tcb_date_tag); + if !tcb_date.is_empty() { + m.insert("tcb_level_date_tag".into(), json!(tcb_date)); + } + + m.insert("pck_crl_num".into(), json!(data.platform.pck_crl_num)); + m.insert( + "root_ca_crl_num".into(), + json!(data.platform.root_ca_crl_num), + ); + m.insert("tcb_eval_num".into(), json!(data.tcb.eval_data_number)); + m.insert("sgx_type".into(), json!(data.platform.pck.sgx_type)); + + if data.platform.pck.dynamic_platform != PckCertFlag::Undefined { + m.insert( + "is_dynamic_platform".into(), + json!(data.platform.pck.dynamic_platform == PckCertFlag::True), + ); + } + if data.platform.pck.cached_keys != PckCertFlag::Undefined { + m.insert( + "cached_keys".into(), + json!(data.platform.pck.cached_keys == PckCertFlag::True), + ); + } + if data.platform.pck.smt_enabled != PckCertFlag::Undefined { + m.insert( + "smt_enabled".into(), + json!(data.platform.pck.smt_enabled == PckCertFlag::True), + ); + } + + if let Some(ref provider_id) = data.platform.pck.platform_provider_id { + m.insert("platform_provider_id".into(), json!(provider_id)); + } + + m.insert( + "fmspc".into(), + json!(hex::encode_upper(data.platform.pck.fmspc)), + ); + m.insert( + "root_key_id".into(), + json!(hex::encode_upper(data.platform.root_key_id)), + ); +} + +/// Build merged Rego 
measurement (single-measurement path). +fn build_merged_measurement(data: &SupplementalData) -> serde_json::Value { + let mut m = serde_json::Map::new(); + m.insert( + "tcb_status".into(), + tcb_status_to_rego_array(data.tcb.status), + ); + insert_platform_fields(&mut m, data); + if !data.tcb.advisory_ids.is_empty() { + m.insert("advisory_ids".into(), json!(data.tcb.advisory_ids)); + } + serde_json::Value::Object(m) +} + +/// Build platform TCB measurement using **unmerged** platform status. +fn build_platform_measurement(data: &SupplementalData) -> serde_json::Value { + let mut m = serde_json::Map::new(); + m.insert( + "tcb_status".into(), + tcb_status_to_rego_array(data.platform.tcb_level.tcb_status), + ); + insert_platform_fields(&mut m, data); + if !data.platform.tcb_level.advisory_ids.is_empty() { + m.insert( + "advisory_ids".into(), + json!(data.platform.tcb_level.advisory_ids), + ); + } + serde_json::Value::Object(m) +} + +/// Build QE Identity measurement for Rego appraisal (TDX). 
+fn build_qe_measurement(data: &SupplementalData) -> Result { + let mut m = serde_json::Map::new(); + + m.insert( + "tcb_status".into(), + tcb_status_to_rego_array(data.qe.tcb_level.tcb_status), + ); + + let qe_tcb_date = parse_rfc3339_unix_secs(&data.qe.tcb_level.tcb_date) + .map_err(|e| anyhow::anyhow!("Failed to parse QE TCB date: {e}"))?; + let qe_date_str = unix_to_rfc3339(qe_tcb_date); + if !qe_date_str.is_empty() { + m.insert("tcb_level_date_tag".into(), json!(qe_date_str)); + } + + let earliest_issue = unix_to_rfc3339(data.qe_iden_earliest_issue_date); + if !earliest_issue.is_empty() { + m.insert("earliest_issue_date".into(), json!(earliest_issue)); + } + let latest_issue = unix_to_rfc3339(data.qe_iden_latest_issue_date); + if !latest_issue.is_empty() { + m.insert("latest_issue_date".into(), json!(latest_issue)); + } + let earliest_exp = unix_to_rfc3339(data.qe_iden_earliest_expiration_date); + if !earliest_exp.is_empty() { + m.insert("earliest_expiration_date".into(), json!(earliest_exp)); + } + + m.insert("tcb_eval_num".into(), json!(data.qe.tcb_eval_data_number)); + m.insert( + "root_key_id".into(), + json!(hex::encode_upper(data.platform.root_key_id)), + ); + + Ok(serde_json::Value::Object(m)) +} + +// ── Tenant measurement helpers ───────────────────────────────────────── + +use crate::quote::{Report, TDReport10, TDReport15}; + +/// Generate SGX enclave measurement JSON from an `EnclaveReport`. 
+/// +/// KSS fields are extracted from reserved areas matching Intel's `sgx_report_body_t` layout: +/// - `isv_ext_prod_id`: reserved1\[12..28\] (16B at offset 32) +/// - `config_id`: reserved3\[32..96\] (64B at offset 192) +/// - `config_svn`: reserved4\[0..2\] (u16 LE at offset 260) +/// - `isv_family_id`: reserved4\[44..60\] (16B at offset 304) +pub(crate) fn sgx_enclave_measurement(report: &EnclaveReport) -> serde_json::Value { + let mut m = serde_json::Map::new(); + + m.insert( + "sgx_miscselect".into(), + json!(hex::encode_upper(report.misc_select.to_le_bytes())), + ); + m.insert( + "sgx_attributes".into(), + json!(hex::encode_upper(report.attributes)), + ); + m.insert( + "sgx_mrenclave".into(), + json!(hex::encode_upper(report.mr_enclave)), + ); + m.insert( + "sgx_mrsigner".into(), + json!(hex::encode_upper(report.mr_signer)), + ); + m.insert("sgx_isvprodid".into(), json!(report.isv_prod_id)); + m.insert("sgx_isvsvn".into(), json!(report.isv_svn)); + m.insert( + "sgx_reportdata".into(), + json!(hex::encode_upper(report.report_data)), + ); + + // KSS fields from reserved areas (Intel sgx_report_body_t layout) + if let Some(ext_prod_id) = report.reserved1.get(12..28) { + m.insert( + "sgx_isvextprodid".into(), + json!(hex::encode_upper(ext_prod_id)), + ); + } + if let Some(config_id) = report.reserved3.get(32..96) { + m.insert("sgx_configid".into(), json!(hex::encode_upper(config_id))); + } + if let Some(config_svn_bytes) = report + .reserved4 + .get(0..2) + .and_then(|s| <[u8; 2]>::try_from(s).ok()) + { + let config_svn = u16::from_le_bytes(config_svn_bytes); + m.insert("sgx_configsvn".into(), json!(config_svn)); + } + if let Some(family_id) = report.reserved4.get(44..60) { + m.insert( + "sgx_isvfamilyid".into(), + json!(hex::encode_upper(family_id)), + ); + } + + serde_json::Value::Object(m) +} + +/// Generate TDX TD 1.0 measurement JSON from a `TDReport10`. 
+fn td10_measurement(report: &TDReport10) -> serde_json::Value { + let mut m = serde_json::Map::new(); + + m.insert( + "tdx_attributes".into(), + json!(hex::encode_upper(report.td_attributes)), + ); + m.insert("tdx_xfam".into(), json!(hex::encode_upper(report.xfam))); + m.insert("tdx_mrtd".into(), json!(hex::encode_upper(report.mr_td))); + m.insert( + "tdx_mrconfigid".into(), + json!(hex::encode_upper(report.mr_config_id)), + ); + m.insert( + "tdx_mrowner".into(), + json!(hex::encode_upper(report.mr_owner)), + ); + m.insert( + "tdx_mrownerconfig".into(), + json!(hex::encode_upper(report.mr_owner_config)), + ); + m.insert("tdx_rtmr0".into(), json!(hex::encode_upper(report.rt_mr0))); + m.insert("tdx_rtmr1".into(), json!(hex::encode_upper(report.rt_mr1))); + m.insert("tdx_rtmr2".into(), json!(hex::encode_upper(report.rt_mr2))); + m.insert("tdx_rtmr3".into(), json!(hex::encode_upper(report.rt_mr3))); + m.insert( + "tdx_reportdata".into(), + json!(hex::encode_upper(report.report_data)), + ); + + serde_json::Value::Object(m) +} + +/// Generate TDX TD 1.5 measurement JSON from a `TDReport15`. +fn td15_measurement(report: &TDReport15) -> serde_json::Value { + let mut m = td10_measurement(&report.base); + if let Some(obj) = m.as_object_mut() { + obj.insert( + "tdx_mrservicetd".into(), + json!(hex::encode_upper(report.mr_service_td)), + ); + } + m +} + +/// Generate tenant measurement JSON from a `Report`. +pub(crate) fn tenant_measurement(report: &Report) -> serde_json::Value { + match report { + Report::SgxEnclave(er) => sgx_enclave_measurement(er), + Report::TD10(td) => td10_measurement(td), + Report::TD15(td) => td15_measurement(td), + } +} + +/// Returns the tenant class_id for the given report type. 
+pub(crate) fn tenant_class_id(report: &Report) -> &'static str { + match report { + Report::SgxEnclave(_) => "bef7cb8c-31aa-42c1-854c-10db005d5c41", + Report::TD10(_) => "a1e4ee9c-a12e-48ac-bed0-e3f89297f687", + Report::TD15(_) => "45b734fc-aa4e-4c3d-ad28-e43d08880e68", + } +} + +/// Returns the platform class_id for the given report type and tee_type. +pub(crate) fn platform_class_id(report: &Report, tee_type: u32) -> &'static str { + match (report, tee_type) { + (Report::TD10(_), _) => "9eec018b-7481-4b1c-8e1a-9f7c0c8c777f", + (Report::TD15(_), _) => "f708b97f-0fb2-4e6b-8b03-8a5bcd1221d3", + _ => "3123ec35-8d38-4ea5-87a5-d6c48b567570", // SGX + } +} + +// ── RegoPolicySet ────────────────────────────────────────────────────── + +/// A set of Rego policies for multi-measurement appraisal. +/// +/// Accepts multiple policy JSON objects (one per class_id). The Rego engine +/// matches each `qvl_result` entry to its corresponding policy by `class_id`. +/// +/// This provides full Intel QAL compatibility with separate evaluation of +/// platform TCB, QE identity, and tenant measurements. +/// +/// # Example +/// +/// ```no_run +/// use dcap_qvl::RegoPolicySet; +/// +/// let platform_policy = r#"{ +/// "environment": { "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570" }, +/// "reference": { "accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0 } +/// }"#; +/// let enclave_policy = r#"{ +/// "environment": { "class_id": "bef7cb8c-31aa-42c1-854c-10db005d5c41" }, +/// "reference": { "sgx_mrenclave": "ABCD..." } +/// }"#; +/// let policies = RegoPolicySet::new(&[platform_policy, enclave_policy]).unwrap(); +/// ``` +pub struct RegoPolicySet { + engine: regorus::Engine, + policies: Vec, +} + +impl RegoPolicySet { + /// Create a `RegoPolicySet` from multiple Intel JSON policy strings. + /// + /// Uses the bundled `qal_script.rego`. Each JSON must have `environment.class_id`. 
+ pub fn new(policy_jsons: &[&str]) -> Result { + Self::with_rego(policy_jsons, include_str!("../../rego/qal_script.rego")) + } + + /// Create a `RegoPolicySet` with a custom Rego script. + pub fn with_rego(policy_jsons: &[&str], rego_source: &str) -> Result { + let mut engine = regorus::Engine::new(); + register_rand_intn(&mut engine)?; + engine + .add_policy("qal_script.rego".into(), rego_source.into()) + .map_err(|e| anyhow::anyhow!("Failed to load Rego policy: {e}"))?; + + let mut policies = Vec::new(); + for json_str in policy_jsons { + let policy: serde_json::Value = serde_json::from_str(json_str) + .map_err(|e| anyhow::anyhow!("Failed to parse policy JSON: {e}"))?; + // Validate that class_id exists + policy + .get("environment") + .and_then(|e| e.get("class_id")) + .and_then(|c| c.as_str()) + .ok_or_else(|| anyhow::anyhow!("Policy JSON missing environment.class_id"))?; + policies.push(policy); + } + + Ok(Self { engine, policies }) + } +} + +/// Register `rand.intn` extension on a regorus engine. +/// +/// OPA's `rand.intn(str, n)` returns a random integer in `[0, n)`. +/// The `str` parameter is a **memoization key** (not a PRNG seed): same `(str, n)` pair +/// within one query evaluation always returns the same result. The actual random number +/// comes from a separate RNG, not derived from the string. +/// +/// Cache key is `"{str}-{n}"` matching OPA's implementation. +/// +/// Ref: OPA docs — "For any given argument pair (str, n), the output will be consistent +/// throughout a query evaluation." +/// +/// +/// Ref: OPA source — `key := randIntCachingKey(fmt.Sprintf("%s-%d", strOp, n))` +/// +fn register_rand_intn(engine: &mut regorus::Engine) -> Result<()> { + let mut cache = std::collections::HashMap::::new(); + engine + .add_extension( + "rand.intn".to_string(), + 2, + Box::new(move |params: Vec| { + let seed = params + .first() + .ok_or_else(|| anyhow::anyhow!("rand.intn: missing first argument"))? 
+ .as_string() + .map_err(|_| anyhow::anyhow!("rand.intn: first argument must be string"))? + .to_string(); + + let n = params + .get(1) + .ok_or_else(|| anyhow::anyhow!("rand.intn: missing second argument"))? + .as_i64() + .map_err(|_| anyhow::anyhow!("rand.intn: second argument must be integer"))?; + + if n == 0 { + return Ok(regorus::Value::from(0i64)); + } + + // OPA uses abs(n) for negative values + let n = n.unsigned_abs(); + + // Cache key = "{seed}-{n}", matching OPA's `fmt.Sprintf("%s-%d", strOp, n)` + // Note: OPA caches with abs'd n, so "-5" and "5" share the same key. + let key = alloc::format!("{seed}-{n}"); + + if let Some(&cached) = cache.get(&key) { + return Ok(regorus::Value::from(cached)); + } + + let mut buf = [0u8; 8]; + getrandom::getrandom(&mut buf) + .map_err(|e| anyhow::anyhow!("rand.intn: RNG failed: {e}"))?; + let random_val = (u64::from_le_bytes(buf).checked_rem(n).unwrap_or(0)) as i64; + cache.insert(key, random_val); + Ok(regorus::Value::from(random_val)) + }), + ) + .map_err(|e| anyhow::anyhow!("Failed to register rand.intn: {e}")) +} + +/// Shared Rego evaluation logic used by both `RegoPolicy` and `RegoPolicySet`. 
+fn eval_rego_engine( + engine: ®orus::Engine, + policies: &[&serde_json::Value], + qvl_result: Vec, +) -> Result<()> { + let mut engine = engine.clone(); + + let input = json!({ + "qvl_result": qvl_result, + "policies": { + "policy_array": policies, + } + }); + + let input_str = serde_json::to_string(&input) + .map_err(|e| anyhow::anyhow!("Failed to serialize Rego input: {e}"))?; + engine + .set_input_json(&input_str) + .map_err(|e| anyhow::anyhow!("Failed to set Rego input: {e}"))?; + + let result = engine + .eval_rule("data.dcap.quote.appraisal.final_appraisal_result".into()) + .map_err(|e| anyhow::anyhow!("Rego evaluation failed: {e}"))?; + + let result_json = result + .to_json_str() + .map_err(|e| anyhow::anyhow!("Failed to convert Rego result: {e}"))?; + + // final_appraisal_result is a Rego set → JSON array of objects + let result_value: serde_json::Value = serde_json::from_str(&result_json) + .map_err(|e| anyhow::anyhow!("Failed to parse final_appraisal_result JSON: {e}"))?; + + let arr = result_value + .as_array() + .ok_or_else(|| anyhow::anyhow!("final_appraisal_result is not an array"))?; + + let entry = arr + .first() + .ok_or_else(|| anyhow::anyhow!("final_appraisal_result is empty"))?; + + let overall = entry + .get("overall_appraisal_result") + .and_then(|v| v.as_i64()) + .ok_or_else(|| { + anyhow::anyhow!("final_appraisal_result missing overall_appraisal_result") + })?; + + match overall { + 1 => Ok(()), + 0 => { + let detail = entry.get("appraised_reports"); + if let Some(detail) = detail { + bail!("Rego appraisal failed: {detail}"); + } + bail!("Rego appraisal failed (result = 0)"); + } + -1 => bail!("No policy matched the report class_id"), + other => bail!("Unexpected Rego appraisal result: {other}"), + } +} + +/// Policy implementation that evaluates Intel's `qal_script.rego` via the +/// [regorus](https://github.com/microsoft/regorus) Rego interpreter. +/// +/// This provides bit-exact compatibility with Intel's Quote Appraisal Library (QAL). 
+/// Users provide a JSON policy in Intel's format (the `reference` object from a +/// Quote Appraisal Policy), and the Rego script evaluates it against the +/// [`SupplementalData`] converted to Intel's measurement JSON format. +/// +/// # Example +/// +/// ```no_run +/// use dcap_qvl::RegoPolicy; +/// +/// let policy_json = r#"{ +/// "environment": { +/// "class_id": "3123ec35-8d38-4ea5-87a5-d6c48b567570", +/// "description": "Strict SGX platform TCB policy" +/// }, +/// "reference": { +/// "accepted_tcb_status": ["UpToDate"], +/// "collateral_grace_period": 0 +/// } +/// }"#; +/// let policy = RegoPolicy::new(policy_json).expect("invalid policy"); +/// ``` +pub struct RegoPolicy { + engine: regorus::Engine, + policy_json: serde_json::Value, + class_id: String, +} + +impl RegoPolicy { + /// Create a `RegoPolicy` from an Intel JSON policy string. + /// + /// Uses the bundled `qal_script.rego` (from Intel's DCAP source). + /// The JSON must contain `environment.class_id` to identify the policy type. + pub fn new(policy_json: &str) -> Result { + Self::with_rego(policy_json, include_str!("../../rego/qal_script.rego")) + } + + /// Create a `RegoPolicy` with a custom Rego script. + /// + /// Use this to provide an updated or modified version of `qal_script.rego`. + pub fn with_rego(policy_json: &str, rego_source: &str) -> Result { + let mut engine = regorus::Engine::new(); + register_rand_intn(&mut engine)?; + engine + .add_policy("qal_script.rego".into(), rego_source.into()) + .map_err(|e| anyhow::anyhow!("Failed to load Rego policy: {e}"))?; + + let policy: serde_json::Value = serde_json::from_str(policy_json) + .map_err(|e| anyhow::anyhow!("Failed to parse policy JSON: {e}"))?; + + let class_id = policy + .get("environment") + .and_then(|e| e.get("class_id")) + .and_then(|c| c.as_str()) + .ok_or_else(|| anyhow::anyhow!("Policy JSON missing environment.class_id"))? 
+ .to_string(); + + Ok(Self { + engine, + policy_json: policy, + class_id, + }) + } +} + +impl Policy for RegoPolicy { + fn validate(&self, data: &SupplementalData) -> Result<()> { + let measurement = build_merged_measurement(data); + let qvl_result = vec![json!({ + "environment": { "class_id": &self.class_id }, + "measurement": measurement, + })]; + eval_rego_engine(&self.engine, &[&self.policy_json], qvl_result) + } +} + +impl Policy for RegoPolicySet { + fn validate(&self, data: &SupplementalData) -> Result<()> { + let qvl_result = to_rego_qvl_result(data)?; + let policy_refs: Vec<&serde_json::Value> = self.policies.iter().collect(); + eval_rego_engine(&self.engine, &policy_refs, qvl_result) + } +} + +/// Generate Intel-format `qvl_result` array for Rego appraisal from [`SupplementalData`]. +/// +/// SGX quotes produce 2 entries (platform + enclave). +/// TDX quotes produce 3 entries (platform + QE identity + TD). +fn to_rego_qvl_result(data: &SupplementalData) -> Result> { + use crate::quote::Report; + + let mut result = Vec::new(); + + // 1. Platform TCB measurement + let platform_cid = platform_class_id(&data.report, data.tee_type); + result.push(json!({ + "environment": { "class_id": platform_cid }, + "measurement": build_platform_measurement(data), + })); + + // 2. QE Identity measurement (TDX only) + if matches!(data.report, Report::TD10(_) | Report::TD15(_)) { + result.push(json!({ + "environment": { "class_id": "3769258c-75e6-4bc7-8d72-d2b0e224cad2" }, + "measurement": build_qe_measurement(data)?, + })); + } + + // 3. 
Tenant measurement (enclave or TD report) + let tenant_cid = tenant_class_id(&data.report); + let mut tenant_m = tenant_measurement(&data.report); + // For SGX enclave, add sgx_ce_attributes from the QE report + if let Report::SgxEnclave(_) = &data.report { + if let Some(obj) = tenant_m.as_object_mut() { + obj.insert( + "sgx_ce_attributes".into(), + json!(hex::encode_upper(data.qe.report.attributes)), + ); + } + } + result.push(json!({ + "environment": { "class_id": tenant_cid }, + "measurement": tenant_m, + })); + + Ok(result) +} + +#[cfg(test)] +#[allow( + clippy::unwrap_used, + clippy::indexing_slicing, + clippy::manual_range_contains +)] +mod tests { + use super::*; + use crate::policy::{PckCertFlag, PckIdentity, PlatformInfo, QeInfo, TcbVerdict}; + use crate::tcb_info::TcbStatus::*; + + const SGX_PLATFORM_CLASS_ID: &str = "3123ec35-8d38-4ea5-87a5-d6c48b567570"; + + fn make_test_supplemental(tcb_status: TcbStatus) -> SupplementalData { + use crate::qe_identity::{QeTcb, QeTcbLevel}; + use crate::tcb_info::{Tcb, TcbComponents, TcbLevel}; + + SupplementalData { + tee_type: 0, + tcb: TcbVerdict { + status: tcb_status, + advisory_ids: vec![], + eval_data_number: 17, + }, + platform: PlatformInfo { + tcb_level: TcbLevel { + tcb: Tcb { + sgx_components: vec![TcbComponents { svn: 0 }; 16], + tdx_components: vec![], + pce_svn: 13, + }, + tcb_date: "2023-07-22T00:00:00Z".to_string(), + tcb_status, + advisory_ids: vec![], + }, + tcb_date_tag: 1_690_000_000, + pck: PckIdentity { + ppid: vec![0u8; 16], + cpu_svn: [0u8; 16], + pce_svn: 13, + pce_id: vec![0u8; 2], + fmspc: [0u8; 6], + sgx_type: 0, + platform_instance_id: None, + dynamic_platform: PckCertFlag::Undefined, + cached_keys: PckCertFlag::Undefined, + smt_enabled: PckCertFlag::Undefined, + platform_provider_id: None, + }, + root_key_id: [0u8; 48], + pck_crl_num: 1, + root_ca_crl_num: 1, + }, + qe: QeInfo { + tcb_level: QeTcbLevel { + tcb: QeTcb { isvsvn: 8 }, + tcb_date: "2024-03-13T00:00:00Z".to_string(), + 
tcb_status: UpToDate, + advisory_ids: vec![], + }, + report: crate::quote::EnclaveReport { + cpu_svn: [0u8; 16], + misc_select: 0, + reserved1: [0u8; 28], + attributes: [0u8; 16], + mr_enclave: [0u8; 32], + reserved2: [0u8; 32], + mr_signer: [0u8; 32], + reserved3: [0u8; 96], + isv_prod_id: 1, + isv_svn: 8, + reserved4: [0u8; 60], + report_data: [0u8; 64], + }, + tcb_eval_data_number: 17, + }, + report: crate::quote::Report::SgxEnclave(crate::quote::EnclaveReport { + cpu_svn: [0u8; 16], + misc_select: 0, + reserved1: [0u8; 28], + attributes: [0u8; 16], + mr_enclave: [0u8; 32], + reserved2: [0u8; 32], + mr_signer: [0u8; 32], + reserved3: [0u8; 96], + isv_prod_id: 0, + isv_svn: 0, + reserved4: [0u8; 60], + report_data: [0u8; 64], + }), + earliest_issue_date: 1_690_000_000, + latest_issue_date: 1_690_100_000, + earliest_expiration_date: 1_703_000_000, + qe_iden_earliest_issue_date: 1_690_000_000, + qe_iden_latest_issue_date: 1_690_100_000, + qe_iden_earliest_expiration_date: 1_703_000_000, + } + } + + /// Create test supplemental data with future dates for Rego (uses real wall clock). 
+ fn make_rego_supplemental(status: TcbStatus) -> SupplementalData { + let mut data = make_test_supplemental(status); + data.earliest_issue_date = 1_900_000_000; + data.latest_issue_date = 1_900_100_000; + data.earliest_expiration_date = 2_000_000_000; + data.qe_iden_earliest_issue_date = 1_900_000_000; + data.qe_iden_latest_issue_date = 1_900_100_000; + data.qe_iden_earliest_expiration_date = 2_000_000_000; + data + } + + fn policy_json(reference: &str) -> String { + format!( + r#"{{ + "environment": {{ + "class_id": "{SGX_PLATFORM_CLASS_ID}", + "description": "Test policy" + }}, + "reference": {reference} + }}"# + ) + } + + #[test] + fn rego_strict_accepts_up_to_date() { + let data = make_rego_supplemental(UpToDate); + let json = + policy_json(r#"{"accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0}"#); + let policy = RegoPolicy::new(&json).unwrap(); + let result = policy.validate(&data); + assert!( + result.is_ok(), + "expected Ok, got: {:?}", + result.unwrap_err() + ); + } + + #[test] + fn rego_strict_rejects_out_of_date() { + let data = make_rego_supplemental(OutOfDate); + let json = + policy_json(r#"{"accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0}"#); + let policy = RegoPolicy::new(&json).unwrap(); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!( + err.contains("appraisal failed"), + "expected appraisal failure, got: {err}" + ); + } + + #[test] + fn rego_permissive_accepts_out_of_date() { + let data = make_rego_supplemental(OutOfDate); + let json = policy_json( + r#"{"accepted_tcb_status": ["UpToDate", "OutOfDate"], "collateral_grace_period": 0}"#, + ); + let policy = RegoPolicy::new(&json).unwrap(); + let result = policy.validate(&data); + assert!( + result.is_ok(), + "expected Ok, got: {:?}", + result.unwrap_err() + ); + } + + #[test] + fn rego_rejects_advisory() { + let mut data = make_rego_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00334".into()]; + let json = policy_json( + 
r#"{ + "accepted_tcb_status": ["UpToDate"], + "collateral_grace_period": 0, + "rejected_advisory_ids": ["INTEL-SA-00334"] + }"#, + ); + let policy = RegoPolicy::new(&json).unwrap(); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!( + err.contains("appraisal failed"), + "expected advisory rejection, got: {err}" + ); + } + + #[test] + fn rego_platform_grace_period_accepts() { + let mut data = make_rego_supplemental(OutOfDate); + data.platform.tcb_date_tag = 1_690_000_000; + let json = policy_json( + r#"{ + "accepted_tcb_status": ["UpToDate", "OutOfDate"], + "collateral_grace_period": 0, + "platform_grace_period": 999999999 + }"#, + ); + let policy = RegoPolicy::new(&json).unwrap(); + let result = policy.validate(&data); + assert!( + result.is_ok(), + "expected Ok, got: {:?}", + result.unwrap_err() + ); + } + + #[test] + fn rego_expiration_check_rejects_expired_collateral() { + let mut data = make_rego_supplemental(UpToDate); + // Override with past dates — collateral expired + data.earliest_issue_date = 1_700_000_000; + data.latest_issue_date = 1_700_100_000; + data.earliest_expiration_date = 1_703_000_000; + let json = + policy_json(r#"{"accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0}"#); + let policy = RegoPolicy::new(&json).unwrap(); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!( + err.contains("appraisal failed"), + "expected expiration failure, got: {err}" + ); + } + + #[test] + fn rego_no_collateral_grace_skips_expiration_check() { + let mut data = make_rego_supplemental(UpToDate); + // Override with past dates — but no grace period in policy means no check + data.earliest_issue_date = 1_700_000_000; + data.latest_issue_date = 1_700_100_000; + data.earliest_expiration_date = 1_703_000_000; + let json = policy_json(r#"{"accepted_tcb_status": ["UpToDate"]}"#); + let policy = RegoPolicy::new(&json).unwrap(); + let result = policy.validate(&data); + assert!( + result.is_ok(), + "expected Ok (no 
expiration check), got: {:?}", + result.unwrap_err() + ); + } + + #[test] + fn rego_missing_class_id_errors() { + let json = r#"{"reference": {"accepted_tcb_status": ["UpToDate"]}}"#; + assert!(RegoPolicy::new(json).is_err()); + } + + #[test] + fn rego_to_measurement_tcb_status_mapping() { + let data = make_test_supplemental(ConfigurationAndSWHardeningNeeded); + let m = build_merged_measurement(&data); + let statuses = m.get("tcb_status").unwrap().as_array().unwrap(); + assert_eq!(statuses.len(), 3); + assert_eq!(statuses[0], "UpToDate"); + assert_eq!(statuses[1], "SWHardeningNeeded"); + assert_eq!(statuses[2], "ConfigurationNeeded"); + } + + #[test] + fn rego_to_measurement_omits_undefined_flags() { + let data = make_test_supplemental(UpToDate); + assert_eq!(data.platform.pck.dynamic_platform, PckCertFlag::Undefined); + let m = build_merged_measurement(&data); + assert!(m.get("is_dynamic_platform").is_none()); + assert!(m.get("cached_keys").is_none()); + assert!(m.get("smt_enabled").is_none()); + } + + #[test] + fn rego_to_measurement_includes_true_flags() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.dynamic_platform = PckCertFlag::True; + data.platform.pck.cached_keys = PckCertFlag::False; + data.platform.pck.smt_enabled = PckCertFlag::True; + let m = build_merged_measurement(&data); + assert_eq!(m.get("is_dynamic_platform").unwrap(), true); + assert_eq!(m.get("cached_keys").unwrap(), false); + assert_eq!(m.get("smt_enabled").unwrap(), true); + } + + #[test] + fn rego_platform_measurement_uses_unmerged_status() { + let mut data = make_test_supplemental(UpToDate); + data.platform.tcb_level.tcb_status = OutOfDate; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00001".into()]; + let m = build_platform_measurement(&data); + let statuses = m.get("tcb_status").unwrap().as_array().unwrap(); + assert!(statuses.contains(&serde_json::json!("OutOfDate"))); + let advisories = m.get("advisory_ids").unwrap().as_array().unwrap(); + 
assert_eq!(advisories, &[serde_json::json!("INTEL-SA-00001")]); + } + + #[test] + fn rego_qe_measurement_fields() { + let data = make_rego_supplemental(UpToDate); + let m = build_qe_measurement(&data).unwrap(); + assert!(m.get("tcb_status").is_some()); + assert_eq!(m.get("tcb_eval_num").unwrap(), 17); + assert!(m.get("root_key_id").is_some()); + assert!(m.get("earliest_issue_date").is_some()); + assert!(m.get("latest_issue_date").is_some()); + assert!(m.get("earliest_expiration_date").is_some()); + assert!(m.get("tcb_level_date_tag").is_some()); + } + + #[test] + fn rego_sgx_enclave_measurement_fields() { + use crate::quote::EnclaveReport; + + let mut report = EnclaveReport { + cpu_svn: [0u8; 16], + misc_select: 0x12345678, + reserved1: [0u8; 28], + attributes: [0xAA; 16], + mr_enclave: [0xBB; 32], + reserved2: [0u8; 32], + mr_signer: [0xCC; 32], + reserved3: [0u8; 96], + isv_prod_id: 42, + isv_svn: 7, + reserved4: [0u8; 60], + report_data: [0xDD; 64], + }; + report.reserved1[12..28].copy_from_slice(&[0x11; 16]); + report.reserved3[32..96].copy_from_slice(&[0x22; 64]); + report.reserved4[0..2].copy_from_slice(&42u16.to_le_bytes()); + report.reserved4[44..60].copy_from_slice(&[0x33; 16]); + + let m = sgx_enclave_measurement(&report); + assert!(m.get("sgx_mrenclave").is_some()); + assert!(m.get("sgx_mrsigner").is_some()); + assert_eq!(m.get("sgx_isvprodid").unwrap(), 42); + assert_eq!(m.get("sgx_isvsvn").unwrap(), 7); + assert!(m.get("sgx_reportdata").is_some()); + assert!(m.get("sgx_configid").is_some()); + assert_eq!(m.get("sgx_configsvn").unwrap(), 42); + assert!(m.get("sgx_isvextprodid").is_some()); + assert!(m.get("sgx_isvfamilyid").is_some()); + } + + #[test] + fn rego_policy_set_sgx_platform_accepts() { + let data = make_rego_supplemental(UpToDate); + let platform_json = format!( + r#"{{ + "environment": {{ "class_id": "{SGX_PLATFORM_CLASS_ID}" }}, + "reference": {{ "accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0 }} + }}"# + ); + let policies 
= RegoPolicySet::new(&[&platform_json]).unwrap(); + let result = policies.validate(&data); + assert!( + result.is_ok(), + "expected Ok, got: {:?}", + result.unwrap_err() + ); + } + + #[test] + fn rego_final_appraisal_result_has_expected_fields() { + // Verify that eval uses final_appraisal_result (not final_ret) by checking + // the Rego engine can produce the full appraisal output with nonce/timestamp. + let data = make_rego_supplemental(UpToDate); + let json = + policy_json(r#"{"accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0}"#); + + let mut engine = regorus::Engine::new(); + register_rand_intn(&mut engine).unwrap(); + engine + .add_policy( + "qal_script.rego".into(), + include_str!("../../rego/qal_script.rego").into(), + ) + .unwrap(); + + let policy_value: serde_json::Value = serde_json::from_str(&json).unwrap(); + let measurement = build_merged_measurement(&data); + let class_id = policy_value["environment"]["class_id"].as_str().unwrap(); + let qvl_result = vec![json!({ + "environment": { "class_id": class_id }, + "measurement": measurement, + })]; + let input = json!({ + "qvl_result": qvl_result, + "policies": { "policy_array": [&policy_value] }, + }); + engine + .set_input_json(&serde_json::to_string(&input).unwrap()) + .unwrap(); + + let result = engine + .eval_rule("data.dcap.quote.appraisal.final_appraisal_result".into()) + .unwrap(); + let result_json: serde_json::Value = + serde_json::from_str(&result.to_json_str().unwrap()).unwrap(); + let arr = result_json.as_array().unwrap(); + assert_eq!(arr.len(), 1, "expected exactly one appraisal result"); + let entry = &arr[0]; + assert_eq!(entry["overall_appraisal_result"], 1); + assert!(entry.get("nonce").is_some(), "missing nonce from rand.intn"); + assert!( + entry.get("appraisal_check_date").is_some(), + "missing appraisal_check_date from time.now_ns" + ); + assert!( + entry.get("appraised_reports").is_some(), + "missing appraised_reports" + ); + // nonce should be a non-negative integer < 
10^15 + let nonce = entry["nonce"].as_i64().unwrap(); + assert!( + nonce >= 0 && nonce < 1_000_000_000_000_000, + "nonce out of range: {nonce}" + ); + } + + #[test] + fn rego_rand_intn_memoization() { + // Same (seed, n) pair within one engine evaluation → same result. + let mut engine = regorus::Engine::new(); + register_rand_intn(&mut engine).unwrap(); + engine + .add_policy( + "test.rego".into(), + r#"package test + import future.keywords.if + a := rand.intn("memo_test", 1000000000) + b := rand.intn("memo_test", 1000000000) + same if { a == b } + "# + .into(), + ) + .unwrap(); + engine.set_input_json("{}").unwrap(); + + let same = engine + .eval_rule("data.test.same".into()) + .unwrap() + .to_json_str() + .unwrap(); + assert_eq!( + same.trim(), + "true", + "rand.intn memoization failed: same seed should return same value" + ); + } + + #[test] + fn rego_qe_measurement_uses_qe_iden_dates() { + let mut data = make_rego_supplemental(UpToDate); + // Set QE-specific dates different from global dates + data.qe_iden_earliest_issue_date = 1_850_000_000; + data.qe_iden_latest_issue_date = 1_850_100_000; + data.qe_iden_earliest_expiration_date = 1_950_000_000; + let m = build_qe_measurement(&data).unwrap(); + // QE measurement should use qe_iden_* dates, not the global ones + let earliest = m.get("earliest_issue_date").unwrap().as_str().unwrap(); + let expected = "2028-08-16T"; // 1_850_000_000 = 2028-08-16T00:53:20Z + assert!( + earliest.starts_with(expected), + "QE measurement should use qe_iden_earliest_issue_date, got: {earliest}" + ); + } + + #[test] + fn rego_policy_set_class_id_mismatch_fails() { + let data = make_rego_supplemental(UpToDate); + let tdx_class_id = "9eec018b-7481-4b1c-8e1a-9f7c0c8c777f"; + let policy_json = format!( + r#"{{ + "environment": {{ "class_id": "{tdx_class_id}" }}, + "reference": {{ "accepted_tcb_status": ["UpToDate"], "collateral_grace_period": 0 }} + }}"# + ); + let policies = RegoPolicySet::new(&[&policy_json]).unwrap(); + let err = 
policies.validate(&data).unwrap_err().to_string(); + assert!( + err.contains("appraisal failed"), + "expected appraisal failure on class_id mismatch, got: {err}" + ); + } +} diff --git a/src/policy/simple.rs b/src/policy/simple.rs new file mode 100644 index 0000000..6c6cc0e --- /dev/null +++ b/src/policy/simple.rs @@ -0,0 +1,832 @@ +use core::time::Duration; + +use anyhow::{bail, Result}; +use serde::{Deserialize, Serialize}; + +use { + super::{PckCertFlag, Policy, SupplementalData}, + crate::tcb_info::TcbStatus, + crate::utils::parse_rfc3339_unix_secs, + alloc::string::String, + alloc::vec::Vec, +}; + +/// Built-in verification policy with builder pattern. +/// +/// Covers the 9 checks from Intel's Appraisal framework (`qal_script.rego`) +/// without requiring a Rego engine. Strict by default: only `UpToDate`, +/// no grace period, no advisory blacklist. +/// +/// # Example +/// ```no_run +/// use dcap_qvl::SimplePolicy; +/// use dcap_qvl::TcbStatus; +/// +/// let now = 1_700_000_000u64; // unix timestamp +/// +/// // Strict: only UpToDate, collateral must not be expired +/// let policy = SimplePolicy::strict(now); +/// +/// // With 90-day collateral grace period +/// use core::time::Duration; +/// let policy = SimplePolicy::strict(now) +/// .allow_status(TcbStatus::SWHardeningNeeded) +/// .collateral_grace_period(Duration::from_secs(90 * 24 * 3600)) +/// .reject_advisory("INTEL-SA-00334"); +/// ``` +#[derive(Clone, Debug)] +pub struct SimplePolicy { + acceptable_statuses: u8, + + // Current time + grace periods (mutually exclusive, default 0 = no tolerance) + now: u64, + collateral_grace_period: u64, + platform_grace_period: u64, + qe_grace_period: u64, + + // TCB evaluation + min_tcb_eval_data_number: Option, + + // Advisory blacklist (quote is rejected if any advisory is in this set) + rejected_advisory_ids: Vec, + + // Platform flags (default false = reject if True) + allow_dynamic_platform: bool, + allow_cached_keys: bool, + allow_smt: bool, + + // SGX type 
whitelist (None = skip check) + accepted_sgx_types: Option>, +} + +impl SimplePolicy { + const UP_TO_DATE: u8 = 1 << 0; + const SW_HARDENING_NEEDED: u8 = 1 << 1; + const CONFIGURATION_NEEDED: u8 = 1 << 2; + const CONFIGURATION_AND_SW_HARDENING_NEEDED: u8 = 1 << 3; + const OUT_OF_DATE: u8 = 1 << 4; + const OUT_OF_DATE_CONFIGURATION_NEEDED: u8 = 1 << 5; + + fn status_to_flag(status: TcbStatus) -> u8 { + match status { + TcbStatus::UpToDate => Self::UP_TO_DATE, + TcbStatus::SWHardeningNeeded => Self::SW_HARDENING_NEEDED, + TcbStatus::ConfigurationNeeded => Self::CONFIGURATION_NEEDED, + TcbStatus::ConfigurationAndSWHardeningNeeded => { + Self::CONFIGURATION_AND_SW_HARDENING_NEEDED + } + TcbStatus::OutOfDate => Self::OUT_OF_DATE, + TcbStatus::OutOfDateConfigurationNeeded => Self::OUT_OF_DATE_CONFIGURATION_NEEDED, + TcbStatus::Revoked => 0, + } + } + + fn new_with_statuses(now: u64, acceptable_statuses: u8) -> Self { + Self { + acceptable_statuses, + now, + collateral_grace_period: 0, + platform_grace_period: 0, + qe_grace_period: 0, + min_tcb_eval_data_number: None, + rejected_advisory_ids: Vec::new(), + allow_dynamic_platform: false, + allow_cached_keys: false, + allow_smt: false, + accepted_sgx_types: None, + } + } + + /// Create a strict policy: only `UpToDate` status is accepted, + /// no grace period, no advisory blacklist. + pub fn strict(now_secs: u64) -> Self { + Self::new_with_statuses(now_secs, Self::UP_TO_DATE) + } + + /// Allow an additional TCB status. + pub fn allow_status(mut self, status: TcbStatus) -> Self { + self.acceptable_statuses |= Self::status_to_flag(status); + self + } + + /// Set collateral grace period (default: zero). Accepts quotes where + /// `earliest_expiration_date + grace_period >= now`. + pub fn collateral_grace_period(mut self, duration: Duration) -> Self { + self.collateral_grace_period = duration.as_secs(); + self + } + + /// Set platform grace period (default: zero). 
When TCB status is + /// OutOfDate or OutOfDateConfigurationNeeded, accepts quotes where + /// `tcb_level_date_tag + grace_period >= now`. Skipped for UpToDate/ConfigNeeded/SWHardening. + pub fn platform_grace_period(mut self, duration: Duration) -> Self { + self.platform_grace_period = duration.as_secs(); + self + } + + /// Set QE grace period (default: zero). When QE TCB status is `OutOfDate`, + /// accepts quotes where `qe_tcb_level.tcb_date + grace_period >= now`. + pub fn qe_grace_period(mut self, duration: Duration) -> Self { + self.qe_grace_period = duration.as_secs(); + self + } + + /// Set minimum TCB evaluation data number. Rejects quotes with + /// `tcb_eval_data_number` below this threshold. + pub fn min_tcb_eval_data_number(mut self, min: u32) -> Self { + self.min_tcb_eval_data_number = Some(min); + self + } + + /// Reject a specific advisory ID. Quotes containing any advisory in the + /// rejected set fail validation. By default the set is empty, allowing all + /// advisory IDs. + pub fn reject_advisory(mut self, id: impl Into) -> Self { + self.rejected_advisory_ids.push(id.into()); + self + } + + /// Reject multiple advisory IDs at once. + pub fn reject_advisories(mut self, ids: &[impl AsRef]) -> Self { + self.rejected_advisory_ids + .extend(ids.iter().map(|id| id.as_ref().to_string())); + self + } + + /// Set whether dynamic platforms are allowed. If `false` (default), rejects + /// quotes where `dynamic_platform` is `True`. + pub fn allow_dynamic_platform(mut self, allow: bool) -> Self { + self.allow_dynamic_platform = allow; + self + } + + /// Set whether cached keys are allowed. If `false` (default), rejects + /// quotes where `cached_keys` is `True`. + pub fn allow_cached_keys(mut self, allow: bool) -> Self { + self.allow_cached_keys = allow; + self + } + + /// Set whether SMT (simultaneous multithreading / hyperthreading) is allowed. + /// If `false` (default), rejects quotes where `smt_enabled` is `True`. 
+ pub fn allow_smt(mut self, allow: bool) -> Self { + self.allow_smt = allow; + self + } + + /// Set accepted SGX types (0=Standard, 1=Scalable, 2=ScalableWithIntegrity). + /// Rejects quotes with `sgx_type` not in this list. Default: skip check. + pub fn accepted_sgx_types(mut self, types: &[u8]) -> Self { + self.accepted_sgx_types = Some(types.to_vec()); + self + } + + /// Check if a TCB status is acceptable according to this policy. + pub fn is_status_acceptable(&self, status: TcbStatus) -> bool { + let flag = Self::status_to_flag(status); + (self.acceptable_statuses & flag) != 0 + } +} + +impl Policy for SimplePolicy { + fn validate(&self, data: &SupplementalData) -> Result<()> { + fn within_grace(date_tag: u64, grace_period: u64, now: u64) -> bool { + date_tag.saturating_add(grace_period) >= now + } + + fn advisory_rejected(rejected_advisory_ids: &[String], id: &str) -> bool { + rejected_advisory_ids + .iter() + .any(|a| a.eq_ignore_ascii_case(id)) + } + + // 1. TCB status whitelist + if !self.is_status_acceptable(data.tcb.status) { + bail!( + "TCB status {:?} is not acceptable by policy", + data.tcb.status + ); + } + + // 3. Collateral expiration: earliest_expiration + grace >= now + if data + .earliest_expiration_date + .saturating_add(self.collateral_grace_period) + < self.now + { + bail!( + "Collateral expired: earliest_expiration {} + grace {} < now {}", + data.earliest_expiration_date, + self.collateral_grace_period, + self.now + ); + } + + // 4. Platform TCB freshness: platform tcb_date_tag + grace >= now. 
+ let platform_is_out_of_date = matches!( + data.platform.tcb_level.tcb_status, + TcbStatus::OutOfDate | TcbStatus::OutOfDateConfigurationNeeded + ); + let platform_in_grace = platform_is_out_of_date + && within_grace( + data.platform.tcb_date_tag, + self.platform_grace_period, + self.now, + ); + if platform_is_out_of_date && !platform_in_grace { + bail!( + "Platform TCB too old: tcb_date_tag {} + grace {} < now {}", + data.platform.tcb_date_tag, + self.platform_grace_period, + self.now + ); + } + + // 4b. QE TCB freshness: QE tcb_date + grace >= now. + let qe_tcb_date_tag = parse_rfc3339_unix_secs(&data.qe.tcb_level.tcb_date) + .map_err(|e| anyhow::anyhow!("Failed to parse QE TCB date: {e}"))?; + let qe_is_out_of_date = data.qe.tcb_level.tcb_status == TcbStatus::OutOfDate; + let qe_in_grace = + qe_is_out_of_date && within_grace(qe_tcb_date_tag, self.qe_grace_period, self.now); + if qe_is_out_of_date && !qe_in_grace { + bail!( + "QE TCB too old: tcb_date_tag {} + grace {} < now {}", + qe_tcb_date_tag, + self.qe_grace_period, + self.now + ); + } + + // 2. Advisory ID blacklist. + for id in &data.platform.tcb_level.advisory_ids { + if advisory_rejected(&self.rejected_advisory_ids, id) { + bail!("Advisory ID {id} is rejected by policy"); + } + } + for id in &data.qe.tcb_level.advisory_ids { + if advisory_rejected(&self.rejected_advisory_ids, id) { + bail!("Advisory ID {id} is rejected by policy"); + } + } + + // 5. Minimum TCB evaluation data number + if let Some(min) = self.min_tcb_eval_data_number { + if data.tcb.eval_data_number < min { + bail!( + "TCB eval data number {} is below minimum {}", + data.tcb.eval_data_number, + min + ); + } + } + + // 6. Dynamic platform flag + if !self.allow_dynamic_platform && data.platform.pck.dynamic_platform == PckCertFlag::True { + bail!("Dynamic platform is not allowed by policy"); + } + + // 7. 
Cached keys flag + if !self.allow_cached_keys && data.platform.pck.cached_keys == PckCertFlag::True { + bail!("Cached keys are not allowed by policy"); + } + + // 8. SMT flag + if !self.allow_smt && data.platform.pck.smt_enabled == PckCertFlag::True { + bail!("SMT (hyperthreading) is not allowed by policy"); + } + + // 9. SGX type whitelist + if let Some(ref types) = self.accepted_sgx_types { + if !types.contains(&data.platform.pck.sgx_type) { + bail!( + "SGX type {} is not in accepted types {:?}", + data.platform.pck.sgx_type, + types + ); + } + } + + Ok(()) + } +} + +/// JSON-serializable configuration for [`SimplePolicy`]. +/// +/// All fields default to the strict values (zero / empty / false). +/// Pass as JSON from FFI (Go, Python) to configure verification policy. +/// +/// ```json +/// { +/// "allowed_statuses": ["UpToDate", "SWHardeningNeeded"], +/// "rejected_advisory_ids": ["INTEL-SA-00334"], +/// "collateral_grace_period_secs": 2592000, +/// "allow_smt": true +/// } +/// ``` +#[derive(Clone, Debug, Default, Serialize, Deserialize)] +pub struct SimplePolicyConfig { + #[serde(default)] + pub allowed_statuses: Vec, + #[serde(default)] + pub rejected_advisory_ids: Vec, + #[serde(default)] + pub collateral_grace_period_secs: u64, + #[serde(default)] + pub platform_grace_period_secs: u64, + #[serde(default)] + pub qe_grace_period_secs: u64, + #[serde(default)] + pub min_tcb_eval_data_number: u32, + #[serde(default)] + pub allow_dynamic_platform: bool, + #[serde(default)] + pub allow_cached_keys: bool, + #[serde(default)] + pub allow_smt: bool, + #[serde(default)] + pub accepted_sgx_types: Option>, +} + +impl SimplePolicyConfig { + /// Build a [`SimplePolicy`] from this config + current timestamp. + /// + /// Default config (all fields zero/empty) produces `SimplePolicy::strict(now)`. 
+ pub fn into_policy(self, now_secs: u64) -> SimplePolicy { + let mut policy = if self.allowed_statuses.is_empty() { + SimplePolicy::strict(now_secs) + } else { + let mut p = SimplePolicy::new_with_statuses(now_secs, 0); + for status in self.allowed_statuses { + p = p.allow_status(status); + } + p + }; + for id in self.rejected_advisory_ids { + policy = policy.reject_advisory(id); + } + if self.collateral_grace_period_secs > 0 { + policy = policy + .collateral_grace_period(Duration::from_secs(self.collateral_grace_period_secs)); + } + if self.platform_grace_period_secs > 0 { + policy = + policy.platform_grace_period(Duration::from_secs(self.platform_grace_period_secs)); + } + if self.qe_grace_period_secs > 0 { + policy = policy.qe_grace_period(Duration::from_secs(self.qe_grace_period_secs)); + } + if self.min_tcb_eval_data_number > 0 { + policy = policy.min_tcb_eval_data_number(self.min_tcb_eval_data_number); + } + policy = policy.allow_dynamic_platform(self.allow_dynamic_platform); + policy = policy.allow_cached_keys(self.allow_cached_keys); + policy = policy.allow_smt(self.allow_smt); + if let Some(types) = self.accepted_sgx_types { + policy = policy.accepted_sgx_types(&types); + } + policy + } +} + +#[cfg(test)] +#[allow(clippy::unwrap_used)] +mod tests { + use super::*; + use crate::policy::{PckIdentity, PlatformInfo, QeInfo, TcbVerdict}; + use crate::tcb_info::TcbStatus::*; + + fn make_test_supplemental(tcb_status: TcbStatus) -> SupplementalData { + use crate::qe_identity::{QeTcb, QeTcbLevel}; + use crate::tcb_info::{Tcb, TcbComponents, TcbLevel}; + + SupplementalData { + tee_type: 0, + tcb: TcbVerdict { + status: tcb_status, + advisory_ids: vec![], + eval_data_number: 17, + }, + platform: PlatformInfo { + tcb_level: TcbLevel { + tcb: Tcb { + sgx_components: vec![TcbComponents { svn: 0 }; 16], + tdx_components: vec![], + pce_svn: 13, + }, + tcb_date: "2023-07-22T00:00:00Z".to_string(), + tcb_status, + advisory_ids: vec![], + }, + tcb_date_tag: 1_690_000_000, 
// ~2023-07-22 + pck: PckIdentity { + ppid: vec![0u8; 16], + cpu_svn: [0u8; 16], + pce_svn: 13, + pce_id: vec![0u8; 2], + fmspc: [0u8; 6], + sgx_type: 0, + platform_instance_id: None, + dynamic_platform: PckCertFlag::Undefined, + cached_keys: PckCertFlag::Undefined, + smt_enabled: PckCertFlag::Undefined, + platform_provider_id: None, + }, + root_key_id: [0u8; 48], + pck_crl_num: 1, + root_ca_crl_num: 1, + }, + qe: QeInfo { + tcb_level: QeTcbLevel { + tcb: QeTcb { isvsvn: 8 }, + tcb_date: "2024-03-13T00:00:00Z".to_string(), + tcb_status: UpToDate, + advisory_ids: vec![], + }, + report: crate::quote::EnclaveReport { + cpu_svn: [0u8; 16], + misc_select: 0, + reserved1: [0u8; 28], + attributes: [0u8; 16], + mr_enclave: [0u8; 32], + reserved2: [0u8; 32], + mr_signer: [0u8; 32], + reserved3: [0u8; 96], + isv_prod_id: 1, + isv_svn: 8, + reserved4: [0u8; 60], + report_data: [0u8; 64], + }, + tcb_eval_data_number: 17, + }, + report: crate::quote::Report::SgxEnclave(crate::quote::EnclaveReport { + cpu_svn: [0u8; 16], + misc_select: 0, + reserved1: [0u8; 28], + attributes: [0u8; 16], + mr_enclave: [0u8; 32], + reserved2: [0u8; 32], + mr_signer: [0u8; 32], + reserved3: [0u8; 96], + isv_prod_id: 0, + isv_svn: 0, + reserved4: [0u8; 60], + report_data: [0u8; 64], + }), + earliest_issue_date: 1_690_000_000, + latest_issue_date: 1_690_100_000, + earliest_expiration_date: 1_703_000_000, // ~2023-12-19 + qe_iden_earliest_issue_date: 1_690_000_000, + qe_iden_latest_issue_date: 1_690_100_000, + qe_iden_earliest_expiration_date: 1_703_000_000, + } + } + + // -- TCB status checks -- + + #[test] + fn policy_strict_accepts_up_to_date() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_702_000_000); // within collateral window + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_strict_rejects_sw_hardening() { + let data = make_test_supplemental(SWHardeningNeeded); + let policy = SimplePolicy::strict(1_702_000_000); + let err = 
policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("TCB status"), "{err}"); + } + + #[test] + fn policy_out_of_date_with_fresh_tcb_date_accepts() { + let mut data = make_test_supplemental(OutOfDate); + data.platform.tcb_date_tag = 1_702_000_000; + let policy = SimplePolicy::strict(1_702_000_000).allow_status(OutOfDate); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_allow_status_builder() { + let data = make_test_supplemental(SWHardeningNeeded); + let policy = SimplePolicy::strict(1_702_000_000).allow_status(SWHardeningNeeded); + assert!(policy.validate(&data).is_ok()); + } + + // -- Advisory ID blacklist -- + + #[test] + fn policy_allows_advisory_when_not_blacklisted() { + let mut data = make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_rejects_blacklisted_advisory() { + let mut data = make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000).reject_advisory("INTEL-SA-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_advisory_blacklist_case_insensitive() { + let mut data = make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000).reject_advisory("intel-sa-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_reject_advisories_batch() { + let mut data = 
make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00820".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00820".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000) + .reject_advisories(&["INTEL-SA-00615", "INTEL-SA-00820"]); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00820"), "{err}"); + } + + #[test] + fn policy_empty_advisories_passes() { + let data = make_test_supplemental(UpToDate); + assert!(data.tcb.advisory_ids.is_empty()); + let policy = SimplePolicy::strict(1_702_000_000); + assert!(policy.validate(&data).is_ok()); + } + + // -- Collateral grace period -- + + #[test] + fn policy_collateral_expired_no_grace_rejects() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_704_000_000); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Collateral expired"), "{err}"); + } + + #[test] + fn policy_collateral_expired_with_grace_accepts() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_704_000_000) + .collateral_grace_period(Duration::from_secs(2_000_000)); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_collateral_expired_grace_too_short_rejects() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_704_000_000) + .collateral_grace_period(Duration::from_secs(500_000)); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Collateral expired"), "{err}"); + } + + #[test] + fn policy_collateral_not_expired_zero_grace_passes() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_702_000_000); + assert!(policy.validate(&data).is_ok()); + } + + // -- Platform grace period -- + + #[test] + fn policy_platform_grace_skipped_for_up_to_date() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_702_000_000); 
+ assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_platform_grace_skipped_for_sw_hardening() { + let data = make_test_supplemental(SWHardeningNeeded); + let policy = SimplePolicy::strict(1_702_000_000).allow_status(SWHardeningNeeded); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_platform_grace_skipped_for_config_needed() { + let data = make_test_supplemental(ConfigurationNeeded); + let policy = SimplePolicy::strict(1_702_000_000).allow_status(ConfigurationNeeded); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_platform_grace_checked_for_out_of_date_rejects() { + let data = make_test_supplemental(OutOfDate); + let policy = SimplePolicy::strict(1_702_000_000).allow_status(OutOfDate); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Platform TCB too old"), "{err}"); + } + + #[test] + fn policy_platform_grace_checked_for_out_of_date_accepts_with_grace() { + let data = make_test_supplemental(OutOfDate); + let policy = SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDate) + .platform_grace_period(Duration::from_secs(13_000_000)); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_platform_grace_too_short_rejects() { + let data = make_test_supplemental(OutOfDate); + let policy = SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDate) + .platform_grace_period(Duration::from_secs(11_000_000)); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Platform TCB too old"), "{err}"); + } + + #[test] + fn policy_platform_grace_checked_for_out_of_date_config_needed() { + let data = make_test_supplemental(OutOfDateConfigurationNeeded); + let policy = SimplePolicy::strict(1_702_000_000).allow_status(OutOfDateConfigurationNeeded); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Platform TCB too old"), "{err}"); + } + + // -- min_tcb_eval_data_number -- + + #[test] + fn 
policy_min_eval_num_rejects_below() { + let data = make_test_supplemental(UpToDate); + assert_eq!(data.tcb.eval_data_number, 17); + let policy = SimplePolicy::strict(1_702_000_000).min_tcb_eval_data_number(20); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("below minimum"), "{err}"); + } + + #[test] + fn policy_min_eval_num_accepts_equal() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_702_000_000).min_tcb_eval_data_number(17); + assert!(policy.validate(&data).is_ok()); + } + + // -- Platform flags -- + + #[test] + fn policy_rejects_dynamic_platform_true() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.dynamic_platform = PckCertFlag::True; + let policy = SimplePolicy::strict(1_702_000_000); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Dynamic platform"), "{err}"); + } + + #[test] + fn policy_allows_dynamic_platform_when_configured() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.dynamic_platform = PckCertFlag::True; + let policy = SimplePolicy::strict(1_702_000_000).allow_dynamic_platform(true); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_undefined_platform_flags_pass() { + let data = make_test_supplemental(UpToDate); + assert_eq!(data.platform.pck.dynamic_platform, PckCertFlag::Undefined); + assert_eq!(data.platform.pck.cached_keys, PckCertFlag::Undefined); + assert_eq!(data.platform.pck.smt_enabled, PckCertFlag::Undefined); + let policy = SimplePolicy::strict(1_702_000_000); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_rejects_smt_true() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.smt_enabled = PckCertFlag::True; + let policy = SimplePolicy::strict(1_702_000_000); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("SMT"), "{err}"); + } + + #[test] + fn 
policy_rejects_cached_keys_true() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.cached_keys = PckCertFlag::True; + let policy = SimplePolicy::strict(1_702_000_000); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("Cached keys"), "{err}"); + } + + // -- SGX type whitelist -- + + #[test] + fn policy_sgx_type_not_configured_passes() { + let data = make_test_supplemental(UpToDate); + let policy = SimplePolicy::strict(1_702_000_000); + assert!(policy.validate(&data).is_ok()); + } + + #[test] + fn policy_sgx_type_whitelist_rejects() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.sgx_type = 1; + let policy = SimplePolicy::strict(1_702_000_000).accepted_sgx_types(&[0]); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("SGX type"), "{err}"); + } + + #[test] + fn policy_sgx_type_whitelist_accepts() { + let mut data = make_test_supplemental(UpToDate); + data.platform.pck.sgx_type = 1; + let policy = SimplePolicy::strict(1_702_000_000).accepted_sgx_types(&[0, 1, 2]); + assert!(policy.validate(&data).is_ok()); + } + + // -- Advisory blacklist during grace -- + + #[test] + fn policy_blacklist_checked_during_collateral_grace() { + let mut data = make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_704_000_000) + .collateral_grace_period(Duration::from_secs(2_000_000)) + .reject_advisory("INTEL-SA-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_blacklist_checked_during_platform_grace() { + let mut data = make_test_supplemental(OutOfDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = 
SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDate) + .platform_grace_period(Duration::from_secs(13_000_000)) + .reject_advisory("INTEL-SA-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_blacklist_checked_for_out_of_date_config_needed() { + let mut data = make_test_supplemental(OutOfDateConfigurationNeeded); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDateConfigurationNeeded) + .platform_grace_period(Duration::from_secs(13_000_000)) + .reject_advisory("INTEL-SA-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_blacklist_checked_without_grace() { + let mut data = make_test_supplemental(UpToDate); + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.platform.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + let policy = SimplePolicy::strict(1_702_000_000).reject_advisory("INTEL-SA-00615"); + let err = policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("INTEL-SA-00615"), "{err}"); + } + + #[test] + fn policy_platform_grace_does_not_cover_qe_out_of_date() { + let mut data = make_test_supplemental(OutOfDate); + data.platform.tcb_level.tcb_status = UpToDate; + data.platform.tcb_level.advisory_ids = vec![]; + data.platform.tcb_date_tag = 1_702_000_000; + data.qe.tcb_level.tcb_status = OutOfDate; + data.qe.tcb_level.tcb_date = "2023-07-22T00:00:00Z".to_string(); + data.qe.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + + let policy = SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDate) + .platform_grace_period(Duration::from_secs(13_000_000)); + let err = 
policy.validate(&data).unwrap_err().to_string(); + assert!(err.contains("QE TCB too old"), "{err}"); + } + + #[test] + fn policy_qe_grace_accepts_qe_out_of_date() { + let mut data = make_test_supplemental(OutOfDate); + data.platform.tcb_level.tcb_status = UpToDate; + data.platform.tcb_level.advisory_ids = vec![]; + data.qe.tcb_level.tcb_status = OutOfDate; + data.qe.tcb_level.tcb_date = "2023-07-22T00:00:00Z".to_string(); + data.qe.tcb_level.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + data.tcb.advisory_ids = vec!["INTEL-SA-00615".to_string()]; + + let policy = SimplePolicy::strict(1_702_000_000) + .allow_status(OutOfDate) + .qe_grace_period(Duration::from_secs(13_000_000)); + assert!(policy.validate(&data).is_ok()); + } +} diff --git a/src/python.rs b/src/python.rs index ce8d79d..91e6411 100644 --- a/src/python.rs +++ b/src/python.rs @@ -1,14 +1,20 @@ +use core::time::Duration; + use pyo3::exceptions::PyValueError; use pyo3::prelude::*; -use pyo3::types::PyBytes; +use pyo3::types::{PyBytes, PyDict, PyTuple}; use pyo3_async_runtimes::tokio::future_into_py; use serde_json; +#[cfg(feature = "rego")] +use crate::policy::{RegoPolicy, RegoPolicySet}; use crate::{ collateral::get_collateral_for_fmspc, intel, + policy::SimplePolicy, quote::{EnclaveReport, Header, Quote, Report, TDReport10, TDReport15}, - verify::{verify, VerifiedReport}, + tcb_info::TcbStatus, + verify::{QuoteVerifier, VerifiedReport}, QuoteCollateralV3, }; @@ -21,31 +27,43 @@ pub struct PyQuoteCollateralV3 { #[pymethods] impl PyQuoteCollateralV3 { #[new] - fn new( - pck_crl_issuer_chain: String, - root_ca_crl: Vec, - pck_crl: Vec, - tcb_info_issuer_chain: String, - tcb_info: String, - tcb_info_signature: Vec, - qe_identity_issuer_chain: String, - qe_identity: String, - qe_identity_signature: Vec, - ) -> Self { - Self { + #[pyo3(signature = (*args, **kwargs))] + fn new(args: &Bound<'_, PyTuple>, kwargs: Option<&Bound<'_, PyDict>>) -> PyResult { + fn get_arg( + args: &Bound<'_, PyTuple>, + kwargs: 
Option<&Bound<'_, PyDict>>, + index: usize, + name: &str, + ) -> PyResult + where + T: for<'py> pyo3::FromPyObject<'py>, + { + if let Ok(value) = args.get_item(index) { + return value.extract::(); + } + let value = kwargs + .and_then(|kw| kw.get_item(name).transpose()) + .transpose()? + .ok_or_else(|| { + PyValueError::new_err(format!("missing required argument: {name}")) + })?; + value.extract::() + } + + Ok(Self { inner: QuoteCollateralV3 { - pck_crl_issuer_chain, - root_ca_crl, - pck_crl, - tcb_info_issuer_chain, - tcb_info, - tcb_info_signature, - qe_identity_issuer_chain, - qe_identity, - qe_identity_signature, + pck_crl_issuer_chain: get_arg(args, kwargs, 0, "pck_crl_issuer_chain")?, + root_ca_crl: get_arg(args, kwargs, 1, "root_ca_crl")?, + pck_crl: get_arg(args, kwargs, 2, "pck_crl")?, + tcb_info_issuer_chain: get_arg(args, kwargs, 3, "tcb_info_issuer_chain")?, + tcb_info: get_arg(args, kwargs, 4, "tcb_info")?, + tcb_info_signature: get_arg(args, kwargs, 5, "tcb_info_signature")?, + qe_identity_issuer_chain: get_arg(args, kwargs, 6, "qe_identity_issuer_chain")?, + qe_identity: get_arg(args, kwargs, 7, "qe_identity")?, + qe_identity_signature: get_arg(args, kwargs, 8, "qe_identity_signature")?, pck_certificate_chain: None, }, - } + }) } #[getter] @@ -427,6 +445,177 @@ impl PyPckExtension { } } +/// Verification policy with builder pattern. +/// +/// Mirrors the Rust `SimplePolicy` API. Use `SimplePolicy.strict(now_secs)` to create +/// a strict policy (only UpToDate), then chain builder methods to relax constraints. 
+#[pyclass] +#[derive(Clone)] +pub struct PySimplePolicy { + inner: SimplePolicy, +} + +fn parse_tcb_status(s: &str) -> PyResult { + match s { + "UpToDate" => Ok(TcbStatus::UpToDate), + "SWHardeningNeeded" => Ok(TcbStatus::SWHardeningNeeded), + "ConfigurationNeeded" => Ok(TcbStatus::ConfigurationNeeded), + "ConfigurationAndSWHardeningNeeded" => Ok(TcbStatus::ConfigurationAndSWHardeningNeeded), + "OutOfDate" => Ok(TcbStatus::OutOfDate), + "OutOfDateConfigurationNeeded" => Ok(TcbStatus::OutOfDateConfigurationNeeded), + "Revoked" => Ok(TcbStatus::Revoked), + _ => Err(PyValueError::new_err(format!("Unknown TCB status: {s}"))), + } +} + +#[pymethods] +impl PySimplePolicy { + /// Create a strict policy: only `UpToDate` status, no grace, no advisory blacklist. + #[staticmethod] + fn strict(now_secs: u64) -> Self { + Self { + inner: SimplePolicy::strict(now_secs), + } + } + + /// Allow an additional TCB status (e.g. "SWHardeningNeeded"). + fn allow_status(&self, status: &str) -> PyResult { + let s = parse_tcb_status(status)?; + Ok(Self { + inner: self.inner.clone().allow_status(s), + }) + } + + /// Reject a specific advisory ID (e.g. "INTEL-SA-00334"). + fn reject_advisory(&self, advisory_id: &str) -> Self { + Self { + inner: self.inner.clone().reject_advisory(advisory_id), + } + } + + /// Reject multiple advisory IDs at once. + fn reject_advisories(&self, advisory_ids: Vec) -> Self { + Self { + inner: self.inner.clone().reject_advisories(&advisory_ids), + } + } + + /// Set collateral grace period in seconds. + fn collateral_grace_period(&self, secs: u64) -> Self { + Self { + inner: self + .inner + .clone() + .collateral_grace_period(Duration::from_secs(secs)), + } + } + + /// Set platform grace period in seconds. + fn platform_grace_period(&self, secs: u64) -> Self { + Self { + inner: self + .inner + .clone() + .platform_grace_period(Duration::from_secs(secs)), + } + } + + /// Set QE grace period in seconds. 
+ fn qe_grace_period(&self, secs: u64) -> Self { + Self { + inner: self + .inner + .clone() + .qe_grace_period(Duration::from_secs(secs)), + } + } + + /// Set minimum TCB evaluation data number. + fn min_tcb_eval_data_number(&self, min: u32) -> Self { + Self { + inner: self.inner.clone().min_tcb_eval_data_number(min), + } + } + + /// Set whether dynamic platforms are allowed. + fn allow_dynamic_platform(&self, allow: bool) -> Self { + Self { + inner: self.inner.clone().allow_dynamic_platform(allow), + } + } + + /// Set whether cached keys are allowed. + fn allow_cached_keys(&self, allow: bool) -> Self { + Self { + inner: self.inner.clone().allow_cached_keys(allow), + } + } + + /// Set whether SMT (hyperthreading) is allowed. + fn allow_smt(&self, allow: bool) -> Self { + Self { + inner: self.inner.clone().allow_smt(allow), + } + } + + /// Set accepted SGX types (e.g. [0, 1, 2]). + fn accepted_sgx_types(&self, types: Vec) -> Self { + Self { + inner: self.inner.clone().accepted_sgx_types(&types), + } + } +} + +#[cfg(feature = "rego")] +#[pyclass] +pub struct PyRegoPolicy { + inner: RegoPolicy, +} + +#[cfg(feature = "rego")] +#[pymethods] +impl PyRegoPolicy { + #[new] + fn new(policy_json: &str) -> PyResult { + let inner = RegoPolicy::new(policy_json) + .map_err(|e| PyValueError::new_err(format!("Invalid Rego policy: {e}")))?; + Ok(Self { inner }) + } + + #[staticmethod] + fn with_rego(policy_json: &str, rego_source: &str) -> PyResult { + let inner = RegoPolicy::with_rego(policy_json, rego_source) + .map_err(|e| PyValueError::new_err(format!("Invalid Rego policy: {e}")))?; + Ok(Self { inner }) + } +} + +#[cfg(feature = "rego")] +#[pyclass] +pub struct PyRegoPolicySet { + inner: RegoPolicySet, +} + +#[cfg(feature = "rego")] +#[pymethods] +impl PyRegoPolicySet { + #[new] + fn new(policy_jsons: Vec) -> PyResult { + let policy_refs: Vec<&str> = policy_jsons.iter().map(String::as_str).collect(); + let inner = RegoPolicySet::new(&policy_refs) + .map_err(|e| 
PyValueError::new_err(format!("Invalid Rego policy set: {e}")))?; + Ok(Self { inner }) + } + + #[staticmethod] + fn with_rego(policy_jsons: Vec, rego_source: &str) -> PyResult { + let policy_refs: Vec<&str> = policy_jsons.iter().map(String::as_str).collect(); + let inner = RegoPolicySet::with_rego(&policy_refs, rego_source) + .map_err(|e| PyValueError::new_err(format!("Invalid Rego policy set: {e}")))?; + Ok(Self { inner }) + } +} + #[pyclass] pub struct PyQuote { inner: Quote, @@ -511,11 +700,12 @@ impl PyQuote { Ok(v) => v, Err(_) => return Ok(None), }; - let mut end = raw.len(); - while end > 0 && raw[end - 1] == 0 { - end -= 1; - } - Ok(Some(PyBytes::new(py, &raw[..end]).into())) + let trimmed = raw + .iter() + .rposition(|byte| *byte != 0) + .and_then(|end| raw.get(..=end)) + .unwrap_or(&[]); + Ok(Some(PyBytes::new(py, trimmed).into())) } /// Parse the Intel SGX extension from the leaf PCK certificate. @@ -537,20 +727,76 @@ impl PyQuote { } } +/// Result of cryptographic quote verification (phase 1). +/// +/// Use `validate(policy)` to apply a policy and get a `VerifiedReport`. +/// Use `into_report_unchecked()` to skip policy validation (dangerous). +#[pyclass] +pub struct PyQuoteVerificationResult { + inner: Option, +} + +#[pymethods] +impl PyQuoteVerificationResult { + /// Validate against a policy, returning a VerifiedReport. Consumes the result. 
+ fn validate<'py>(&mut self, policy: &Bound<'py, PyAny>) -> PyResult { + let result = self + .inner + .take() + .ok_or_else(|| PyValueError::new_err("verification result already consumed"))?; + + let report = if let Ok(policy) = policy.extract::>() { + result.validate(&policy.inner) + } else { + #[cfg(feature = "rego")] + { + if let Ok(policy) = policy.extract::>() { + result.validate(&policy.inner) + } else if let Ok(policy) = policy.extract::>() { + result.validate(&policy.inner) + } else { + return Err(PyValueError::new_err( + "policy must be SimplePolicy, RegoPolicy, or RegoPolicySet", + )); + } + } + #[cfg(not(feature = "rego"))] + { + return Err(PyValueError::new_err("policy must be SimplePolicy")); + } + } + .map_err(|e| PyValueError::new_err(format!("Policy validation failed: {e:?}")))?; + Ok(PyVerifiedReport { inner: report }) + } + + /// Get VerifiedReport without policy validation. Consumes the result. + /// + /// WARNING: Skips all policy checks. Use only when you handle validation externally. 
+ fn into_report_unchecked(mut slf: PyRefMut<'_, Self>) -> PyResult { + let result = slf + .inner + .take() + .ok_or_else(|| PyValueError::new_err("verification result already consumed"))?; + Ok(PyVerifiedReport { + inner: result.into_report_unchecked(), + }) + } +} + #[pyfunction] fn py_verify( raw_quote: &Bound<'_, PyBytes>, collateral: &PyQuoteCollateralV3, now_secs: u64, -) -> PyResult { +) -> PyResult { let quote_bytes = raw_quote.as_bytes(); - - match verify(quote_bytes, &collateral.inner, now_secs) { - Ok(verified_report) => Ok(PyVerifiedReport { - inner: verified_report, - }), - Err(e) => Err(PyValueError::new_err(format!("Verification failed: {e:?}"))), - } + let verifier = QuoteVerifier::new_prod(crate::verify::ring::backend()); + let result = verifier + .verify(quote_bytes, collateral.inner.clone(), now_secs) + .map_err(|e| PyValueError::new_err(format!("Verification failed: {e:?}")))?; + Ok(PyQuoteVerificationResult { + inner: Some(result), + }) } #[pyfunction] @@ -559,20 +805,17 @@ fn py_verify_with_root_ca( collateral: &PyQuoteCollateralV3, root_ca_der: &Bound<'_, PyBytes>, now_secs: u64, -) -> PyResult { +) -> PyResult { let quote_bytes = raw_quote.as_bytes(); let root_ca = root_ca_der.as_bytes(); - let verifier = crate::verify::QuoteVerifier::new( - root_ca.to_vec(), - crate::verify::default_crypto::backend(), - ); - match verifier.verify(quote_bytes, &collateral.inner, now_secs) { - Ok(verified_report) => Ok(PyVerifiedReport { - inner: verified_report, - }), - Err(e) => Err(PyValueError::new_err(format!("Verification failed: {e:?}"))), - } + let verifier = QuoteVerifier::new(root_ca.to_vec(), crate::verify::ring::backend()); + let result = verifier + .verify(quote_bytes, collateral.inner.clone(), now_secs) + .map_err(|e| PyValueError::new_err(format!("Verification failed: {e:?}")))?; + Ok(PyQuoteVerificationResult { + inner: Some(result), + }) } #[pyfunction] @@ -619,6 +862,12 @@ pub fn register_module(m: &Bound<'_, PyModule>) -> PyResult<()> { 
m.add_class::()?; m.add_class::()?; m.add_class::()?; + m.add_class::()?; + #[cfg(feature = "rego")] + m.add_class::()?; + #[cfg(feature = "rego")] + m.add_class::()?; + m.add_class::()?; m.add_class::()?; m.add_function(wrap_pyfunction!(py_verify, m)?)?; m.add_function(wrap_pyfunction!(py_verify_with_root_ca, m)?)?; diff --git a/src/tcb_info.rs b/src/tcb_info.rs index a5905cf..57f1f40 100644 --- a/src/tcb_info.rs +++ b/src/tcb_info.rs @@ -1,3 +1,5 @@ +use std::cmp::Ordering; + use alloc::string::String; use alloc::vec::Vec; use derive_more::Display; @@ -57,22 +59,32 @@ pub struct TcbComponents { pub svn: u8, } -#[derive( - Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Serialize, Deserialize, Display, -)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, Serialize, Deserialize, Display)] #[display("{_variant}")] #[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] #[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] pub enum TcbStatus { UpToDate, - OutOfDateConfigurationNeeded, - OutOfDate, - ConfigurationAndSWHardeningNeeded, - ConfigurationNeeded, SWHardeningNeeded, + ConfigurationNeeded, + ConfigurationAndSWHardeningNeeded, + OutOfDate, + OutOfDateConfigurationNeeded, Revoked, } +impl Ord for TcbStatus { + fn cmp(&self, other: &Self) -> Ordering { + self.severity().cmp(&other.severity()) + } +} + +impl PartialOrd for TcbStatus { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + impl TcbStatus { fn severity(&self) -> u8 { match self { @@ -86,19 +98,19 @@ impl TcbStatus { } } - /// Returns true if the TCB status is acceptable to let the caller decide - /// whether to accept the quote or not. + /// Converge platform TCB status with QE TCB status. /// - /// Currently, `Revoked` status is considered invalid and will cause the verification to fail. 
- pub(crate) fn is_valid(&self) -> bool { - match self { - Self::UpToDate => true, - Self::SWHardeningNeeded => true, - Self::ConfigurationNeeded => true, - Self::ConfigurationAndSWHardeningNeeded => true, - Self::OutOfDate => true, - Self::OutOfDateConfigurationNeeded => true, - Self::Revoked => false, + /// Matches Intel QVL's `convergeTcbStatusWithQeTcbStatus()` from + /// `TcbLevelCheck.cpp`. The QE status can only be UpToDate, OutOfDate, + /// or Revoked (from QE Identity verification). + fn converge_with_qe(self, qe: TcbStatus) -> TcbStatus { + use TcbStatus::*; + match (qe, self) { + // QE is OutOfDate: escalate platform status + (OutOfDate, ConfigurationNeeded | ConfigurationAndSWHardeningNeeded) => { + OutOfDateConfigurationNeeded + } + _ => qe.max(self), } } } @@ -124,13 +136,11 @@ impl TcbStatusWithAdvisory { } } - /// Merge two TCB statuses, taking the worse status and combining advisory IDs + /// Merge platform TCB status with QE TCB status, following Intel QVL's + /// `convergeTcbStatusWithQeTcbStatus()` logic. `self` is the platform + /// status, `other` is the QE status. 
pub fn merge(self, other: &TcbStatusWithAdvisory) -> Self { - let final_status = if other.status.severity() > self.status.severity() { - other.status - } else { - self.status - }; + let final_status = self.status.converge_with_qe(other.status); let mut advisory_ids = self.advisory_ids; for id in &other.advisory_ids { @@ -151,30 +161,70 @@ mod tests { use super::*; use TcbStatus::*; + fn merge(platform: TcbStatus, qe: TcbStatus) -> TcbStatus { + TcbStatusWithAdvisory::new(platform, vec![]) + .merge(&TcbStatusWithAdvisory::new(qe, vec![])) + .status + } + + // ── QE UpToDate: pass through platform status ────────────────────── #[test] - fn test_tcb_status_merge_both_up_to_date() { - let a = TcbStatusWithAdvisory::new(UpToDate, vec![]); - let b = TcbStatusWithAdvisory::new(UpToDate, vec![]); - let result = a.merge(&b); - assert_eq!(result.status, UpToDate); - assert!(result.advisory_ids.is_empty()); + fn qe_uptodate_passes_through() { + assert_eq!(merge(UpToDate, UpToDate), UpToDate); + assert_eq!(merge(SWHardeningNeeded, UpToDate), SWHardeningNeeded); + assert_eq!(merge(ConfigurationNeeded, UpToDate), ConfigurationNeeded); + assert_eq!( + merge(ConfigurationAndSWHardeningNeeded, UpToDate), + ConfigurationAndSWHardeningNeeded + ); + assert_eq!(merge(OutOfDate, UpToDate), OutOfDate); + assert_eq!( + merge(OutOfDateConfigurationNeeded, UpToDate), + OutOfDateConfigurationNeeded + ); + assert_eq!(merge(Revoked, UpToDate), Revoked); } + // ── QE OutOfDate: escalate platform status ───────────────────────── #[test] - fn test_tcb_status_merge_takes_worse() { - let a = TcbStatusWithAdvisory::new(UpToDate, vec![]); - let b = TcbStatusWithAdvisory::new(OutOfDate, vec!["INTEL-SA-00001".into()]); - let result = a.merge(&b); - assert_eq!(result.status, OutOfDate); - assert_eq!(result.advisory_ids, vec!["INTEL-SA-00001"]); + fn qe_outofdate_escalates() { + assert_eq!(merge(UpToDate, OutOfDate), OutOfDate); + assert_eq!(merge(SWHardeningNeeded, OutOfDate), OutOfDate); + assert_eq!( 
+ merge(ConfigurationNeeded, OutOfDate), + OutOfDateConfigurationNeeded + ); + assert_eq!( + merge(ConfigurationAndSWHardeningNeeded, OutOfDate), + OutOfDateConfigurationNeeded + ); + } + + #[test] + fn qe_outofdate_already_worse_keeps() { + assert_eq!(merge(OutOfDate, OutOfDate), OutOfDate); + assert_eq!( + merge(OutOfDateConfigurationNeeded, OutOfDate), + OutOfDateConfigurationNeeded + ); + assert_eq!(merge(Revoked, OutOfDate), Revoked); + } + + // ── QE Revoked: always revoked ───────────────────────────────────── + #[test] + fn qe_revoked_always_revoked() { + assert_eq!(merge(UpToDate, Revoked), Revoked); + assert_eq!(merge(SWHardeningNeeded, Revoked), Revoked); + assert_eq!(merge(OutOfDate, Revoked), Revoked); + assert_eq!(merge(ConfigurationNeeded, Revoked), Revoked); } + // ── Advisory ID merging ──────────────────────────────────────────── #[test] - fn test_tcb_status_merge_combines_advisories() { + fn merge_combines_advisories() { let a = TcbStatusWithAdvisory::new(OutOfDate, vec!["INTEL-SA-00001".into()]); - let b = TcbStatusWithAdvisory::new(SWHardeningNeeded, vec!["INTEL-SA-00002".into()]); + let b = TcbStatusWithAdvisory::new(UpToDate, vec!["INTEL-SA-00002".into()]); let result = a.merge(&b); - assert_eq!(result.status, OutOfDate); assert_eq!( result.advisory_ids, vec!["INTEL-SA-00001", "INTEL-SA-00002"] @@ -182,7 +232,7 @@ mod tests { } #[test] - fn test_tcb_status_merge_deduplicates_advisories() { + fn merge_deduplicates_advisories() { let a = TcbStatusWithAdvisory::new(OutOfDate, vec!["INTEL-SA-00001".into()]); let b = TcbStatusWithAdvisory::new(OutOfDate, vec!["INTEL-SA-00001".into()]); let result = a.merge(&b); diff --git a/src/utils.rs b/src/utils.rs index 9abedef..626807e 100644 --- a/src/utils.rs +++ b/src/utils.rs @@ -104,6 +104,40 @@ pub(crate) fn extract_raw_certs(cert_chain: &[u8]) -> Result>> { .collect()) } +pub(crate) fn parse_rfc3339_unix_secs(value: &str) -> Result { + chrono::DateTime::parse_from_rfc3339(value) + .context("Failed to 
parse RFC3339 datetime")? + .timestamp() + .try_into() + .context("RFC3339 datetime is before Unix epoch") +} + +pub(crate) mod serde_vec_bytes { + use alloc::vec::Vec; + use serde::ser::SerializeSeq; + use serde::{Deserialize, Deserializer, Serializer}; + use serde_bytes::{ByteBuf, Bytes}; + + pub fn serialize(value: &[Vec], serializer: S) -> Result + where + S: Serializer, + { + let mut seq = serializer.serialize_seq(Some(value.len()))?; + for item in value { + seq.serialize_element(Bytes::new(item))?; + } + seq.end() + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result>, D::Error> + where + D: Deserializer<'de>, + { + let value = Vec::::deserialize(deserializer)?; + Ok(value.into_iter().map(ByteBuf::into_vec).collect()) + } +} + pub fn extract_certs<'a>(cert_chain: &'a [u8]) -> Result>> { let mut certs = Vec::>::new(); @@ -139,6 +173,33 @@ pub fn encode_as_der(data: &[u8]) -> Result> { Ok(writer.finish().context("Failed to finish writer")?.to_vec()) } +/// Extract the CRL Number (OID 2.5.29.20) from a DER-encoded CRL. +/// +/// Returns `Ok(0)` if the CRL Number extension is not present. +pub fn extract_crl_number(crl_der: &[u8]) -> Result { + use der::Decode as _; + let crl = x509_cert::crl::CertificateList::from_der(crl_der).context("Failed to parse CRL")?; + let Some(extensions) = &crl.tbs_cert_list.crl_extensions else { + return Ok(0); + }; + for ext in extensions.iter() { + // OID 2.5.29.20 = id-ce-cRLNumber + if ext.extn_id.to_string() == "2.5.29.20" { + // CRL Number is encoded as an ASN.1 INTEGER + let crl_num = + der::asn1::UintRef::from_der(ext.extn_value.as_bytes()).context("CRL number")?; + let bytes = crl_num.as_bytes(); + // Convert big-endian bytes to u32 (CRL numbers are typically small) + let mut val: u32 = 0; + for &b in bytes { + val = val.checked_shl(8).context("CRL number too large for u32")? | u32::from(b); + } + return Ok(val); + } + } + Ok(0) +} + /// Parse CRL DER bytes into CertRevocationList objects. 
/// Call this once and pass the results to `verify_certificate_chain`. pub fn parse_crls(crl_der: &[&[u8]]) -> Result>> { diff --git a/src/verify.rs b/src/verify.rs index 4b26caf..8dca45c 100644 --- a/src/verify.rs +++ b/src/verify.rs @@ -7,29 +7,35 @@ use scale::Decode; use { crate::constants::*, crate::intel, - crate::qe_identity::QeIdentity, - crate::tcb_info::{TcbInfo, TcbStatusWithAdvisory}, + crate::policy::{ + PckCertFlag, PckIdentity, PlatformInfo, Policy, QeInfo, SupplementalData, TcbVerdict, + }, + crate::qe_identity::{QeIdentity, QeTcbLevel}, + crate::tcb_info::{TcbInfo, TcbLevel, TcbStatus, TcbStatusWithAdvisory}, alloc::string::String, alloc::vec::Vec, }; pub use crate::quote::{AuthData, EnclaveReport, Quote}; + +#[cfg(feature = "ring")] +pub(crate) use self::ring as default_crypto; +#[cfg(all(not(feature = "ring"), feature = "rustcrypto"))] +pub(crate) use self::rustcrypto as default_crypto; use crate::{ quote::{Report, TDAttributes}, - utils::{encode_as_der, extract_certs, parse_crls, verify_certificate_chain}, + utils::{ + encode_as_der, extract_certs, parse_crls, parse_rfc3339_unix_secs, verify_certificate_chain, + }, }; use crate::{ quote::{TDReport10, TDReport15}, QuoteCollateralV3, }; + use rustls_pki_types::CertificateDer; use serde::{Deserialize, Serialize}; -#[cfg(feature = "ring")] -pub(crate) use self::ring as default_crypto; -#[cfg(all(not(feature = "ring"), feature = "rustcrypto"))] -pub(crate) use self::rustcrypto as default_crypto; - /// Crypto backend configuration for quote verification. 
/// /// Holds the signature verification algorithm and SHA-256 implementation @@ -40,6 +46,8 @@ pub struct CryptoBackend { pub sig_algo: &'static dyn rustls_pki_types::SignatureVerificationAlgorithm, /// SHA-256 hash function pub sha256: fn(&[u8]) -> [u8; 32], + /// SHA-384 hash function (used for root_key_id computation) + pub sha384: fn(&[u8]) -> [u8; 48], } #[derive(Debug, Clone, Copy, PartialEq, Eq)] @@ -82,6 +90,144 @@ use borsh::BorshSchema; #[cfg(feature = "borsh")] use borsh::{BorshDeserialize, BorshSerialize}; +/// Result of cryptographic quote verification, before policy validation. +/// +/// The enclave report is private — it can only be obtained by passing a [`Policy`] +/// via [`validate()`](Self::validate). +/// +/// [`SupplementalData`] is built lazily via [`supplemental()`](Self::supplemental) — +/// the `verify()` call itself does the minimum work (crypto only). +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] +#[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] +pub struct QuoteVerificationResult { + report: Report, + collateral: QuoteCollateralV3, + #[serde(with = "crate::utils::serde_vec_bytes")] + pck_cert_chain_der: Vec>, + // -- core verification results (always computed) -- + tee_type: u32, + tcb_status: TcbStatus, + advisory_ids: Vec, + platform_tcb_level: TcbLevel, + qe_tcb_level: QeTcbLevel, + pck_ext: PckCertChainResult, + qe_report: EnclaveReport, + tcb_eval_data_number: u32, + qe_tcb_eval_data_number: u32, + #[serde(with = "serde_bytes")] + root_key_id: [u8; 48], +} + +impl QuoteVerificationResult { + /// Build the full [`SupplementalData`] from verification intermediates. + /// + /// Computes the collateral time window from all 8 sources (TCBInfo, QEIdentity, + /// 2 CRLs, 4 certificate chains), root_key_id SHA-384, CRL numbers, and tcb_date_tag. 
+ pub fn supplemental(&self) -> Result { + // Parse collateral JSON for time window computation + let tcb_info: TcbInfo = serde_json::from_str(&self.collateral.tcb_info) + .context("Failed to parse TcbInfo for supplemental")?; + let qe_identity: QeIdentity = serde_json::from_str(&self.collateral.qe_identity) + .context("Failed to parse QeIdentity for supplemental")?; + let pck_certs: Vec> = self + .pck_cert_chain_der + .iter() + .map(|cert| CertificateDer::from(cert.as_slice())) + .collect(); + + let collateral_dates = + compute_collateral_time_window(&self.collateral, &pck_certs, &tcb_info, &qe_identity)?; + + // root_key_id: SHA-384 of root CA's raw public key bytes + let root_key_id = self.root_key_id; + + // CRL numbers + let root_ca_crl_num = + crate::utils::extract_crl_number(&self.collateral.root_ca_crl).unwrap_or(0); + let pck_crl_num = crate::utils::extract_crl_number(&self.collateral.pck_crl).unwrap_or(0); + + // tcb_date_tag + let tcb_date_tag = parse_rfc3339_unix_secs(&self.platform_tcb_level.tcb_date) + .context("Failed to parse platform TCB date")?; + + Ok(SupplementalData { + tee_type: self.tee_type, + tcb: TcbVerdict { + status: self.tcb_status, + advisory_ids: self.advisory_ids.clone(), + eval_data_number: self.tcb_eval_data_number, + }, + platform: PlatformInfo { + tcb_level: self.platform_tcb_level.clone(), + tcb_date_tag, + pck: PckIdentity { + ppid: self.pck_ext.ppid.clone(), + cpu_svn: self.pck_ext.cpu_svn, + pce_svn: self.pck_ext.pce_svn, + pce_id: self.pck_ext.pce_id.clone(), + fmspc: self.pck_ext.fmspc, + sgx_type: self.pck_ext.sgx_type, + platform_instance_id: self.pck_ext.platform_instance_id, + dynamic_platform: self.pck_ext.dynamic_platform, + cached_keys: self.pck_ext.cached_keys, + smt_enabled: self.pck_ext.smt_enabled, + // Intel's upstream DCAP Rego policy checks + // `platform_provider_id`, but the upstream QvE producer + // currently leaves it as a TODO when building the platform + // measurement JSON: + // 
https://github.com/intel/confidential-computing.tee.dcap/blob/main/ae/QvE/qve/qve.cpp + platform_provider_id: None, + }, + root_key_id, + pck_crl_num, + root_ca_crl_num, + }, + qe: QeInfo { + tcb_level: self.qe_tcb_level.clone(), + report: self.qe_report, + tcb_eval_data_number: self.qe_tcb_eval_data_number, + }, + report: self.report.clone(), + earliest_issue_date: collateral_dates.earliest_issue, + latest_issue_date: collateral_dates.latest_issue, + earliest_expiration_date: collateral_dates.earliest_expiration, + qe_iden_earliest_issue_date: collateral_dates.qe_iden_earliest_issue, + qe_iden_latest_issue_date: collateral_dates.qe_iden_latest_issue, + qe_iden_earliest_expiration_date: collateral_dates.qe_iden_earliest_expiration, + }) + } + + /// Validate against a policy, consuming self into [`VerifiedReport`] on success. + pub fn validate(self, policy: &P) -> Result { + let supplemental = self.supplemental()?; + policy.validate(&supplemental)?; + Ok(self.into_report_unchecked()) + } + + /// The Platform Provisioning ID (PPID) extracted from the PCK certificate. + pub fn ppid(&self) -> &[u8] { + &self.pck_ext.ppid + } + + /// Convert directly into [`VerifiedReport`] **without applying any policy**. + /// + /// # Warning + /// This skips all policy checks (TCB status, advisory IDs, collateral + /// freshness, platform flags). Use only when you handle validation + /// externally or intentionally accept any verification result. 
+ pub fn into_report_unchecked(self) -> VerifiedReport { + VerifiedReport { + status: self.tcb_status.to_string(), + advisory_ids: self.advisory_ids, + report: self.report, + ppid: self.pck_ext.ppid, + platform_tcb_level: self.platform_tcb_level, + qe_tcb_level: self.qe_tcb_level, + } + } +} + #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)] #[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] #[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] @@ -91,14 +237,14 @@ pub struct VerifiedReport { pub report: Report, #[serde(with = "serde_bytes")] pub ppid: Vec, - pub qe_status: TcbStatusWithAdvisory, - pub platform_status: TcbStatusWithAdvisory, + pub platform_tcb_level: TcbLevel, + pub qe_tcb_level: QeTcbLevel, } /// Quote verifier with configurable root certificate and crypto backend. /// -/// This allows using custom root certificates for testing or private deployments, -/// and selecting between different cryptographic backends (ring or rustcrypto). +/// Returns [`QuoteVerificationResult`] from cryptographic verification. +/// The caller applies a [`Policy`] via [`QuoteVerificationResult::validate()`]. pub struct QuoteVerifier { root_ca_der: Vec, backend: CryptoBackend, @@ -113,27 +259,28 @@ impl QuoteVerifier { } } - /// Create a new verifier using Intel's production root certificate with ring backend. + /// Create a new verifier using Intel's production root certificate. pub fn new_prod(backend: CryptoBackend) -> Self { Self::new(TRUSTED_ROOT_CA_DER.to_vec(), backend) } - /// Verify a quote with the configured root certificate - /// - /// # Arguments - /// * `raw_quote` - The raw quote bytes - /// * `collateral` - The quote collateral - /// * `now_secs` - Current time in seconds since UNIX epoch + #[cfg(feature = "_anycrypto")] + pub fn new_prod_default_crypto() -> Self { + Self::new_prod(default_crypto::backend()) + } + + /// Perform cryptographic verification, returning [`QuoteVerificationResult`]. 
/// - /// # Returns - /// * `Ok(VerifiedReport)` - The verified report - /// * `Err(Error)` - The error + /// Takes ownership of `collateral` so it can be used for lazy Rego time-window + /// computation. This does NOT apply any policy. Use + /// [`QuoteVerificationResult::validate()`] to apply a policy and obtain a + /// [`VerifiedReport`]. pub fn verify( &self, raw_quote: &[u8], - collateral: &QuoteCollateralV3, + collateral: QuoteCollateralV3, now_secs: u64, - ) -> Result { + ) -> Result { verify_impl( raw_quote, collateral, @@ -145,25 +292,18 @@ impl QuoteVerifier { ) } - /// Verify a quote with the configured root certificate, passing a TCB info override + /// Verify a quote with the configured root certificate, passing a TCB info override. /// - /// # Arguments - /// * `raw_quote` - The raw quote bytes - /// * `collateral` - The quote collateral - /// * `now_secs` - Current time in seconds since UNIX epoch - /// * `override_tcb_info` - a function which modifies TCB info after the signature check - /// - /// # Returns - /// * `Ok(VerifiedReport)` - The verified report - /// * `Err(Error)` - The error + /// The override function receives `TcbInfo` after signature verification and can + /// modify it before TCB level matching. Use with extreme caution. 
#[cfg(feature = "danger-allow-tcb-override")] pub fn dangerous_verify_with_tcb_override( &self, raw_quote: &[u8], - collateral: &QuoteCollateralV3, + collateral: QuoteCollateralV3, now_secs: u64, override_tcb_info: impl FnOnce(TcbInfo) -> TcbInfo, - ) -> Result { + ) -> Result { verify_impl( raw_quote, collateral, @@ -175,67 +315,309 @@ impl QuoteVerifier { } } -#[cfg(all(feature = "js", feature = "_anycrypto"))] -#[wasm_bindgen] -pub fn js_verify( - raw_quote: JsValue, - quote_collateral: JsValue, - now: u64, -) -> Result { - let raw_quote: Vec = serde_wasm_bindgen::from_value(raw_quote) - .map_err(|_| JsValue::from_str("Failed to decode raw_quote"))?; - let quote_collateral = serde_wasm_bindgen::from_value::(quote_collateral)?; - - let verified_report = verify(&raw_quote, "e_collateral, now).map_err(|e| { - let error_msg = format_error_chain(&e); - serde_wasm_bindgen::to_value(&error_msg) - .unwrap_or_else(|_| JsValue::from_str("Failed to encode Error")) - })?; - - serde_wasm_bindgen::to_value(&verified_report) - .map_err(|_| JsValue::from_str("Failed to encode verified_report")) +/// Verification policy builder for JS/WASM. 
+/// +/// ```js +/// const policy = new SimplePolicy(now) +/// .allow_status("OutOfDate") +/// .collateral_grace_period(7n * 86400n) +/// .allow_smt(true); +/// ``` +#[cfg(feature = "js")] +#[wasm_bindgen(js_name = "SimplePolicy")] +pub struct JsSimplePolicy { + inner: crate::policy::SimplePolicy, +} + +#[cfg(feature = "js")] +fn js_parse_tcb_status(s: &str) -> Result { + match s { + "UpToDate" => Ok(TcbStatus::UpToDate), + "SWHardeningNeeded" => Ok(TcbStatus::SWHardeningNeeded), + "ConfigurationNeeded" => Ok(TcbStatus::ConfigurationNeeded), + "ConfigurationAndSWHardeningNeeded" => Ok(TcbStatus::ConfigurationAndSWHardeningNeeded), + "OutOfDate" => Ok(TcbStatus::OutOfDate), + "OutOfDateConfigurationNeeded" => Ok(TcbStatus::OutOfDateConfigurationNeeded), + "Revoked" => Ok(TcbStatus::Revoked), + _ => Err(JsValue::from_str(&alloc::format!( + "Unknown TCB status: {s}" + ))), + } +} + +#[cfg(feature = "js")] +#[wasm_bindgen(js_class = "SimplePolicy")] +impl JsSimplePolicy { + /// Create a strict policy: only `UpToDate`, no grace period, no advisory blacklist. + #[wasm_bindgen(constructor)] + pub fn strict(now_secs: u64) -> Self { + Self { + inner: crate::policy::SimplePolicy::strict(now_secs), + } + } + + /// Allow an additional TCB status (e.g. "OutOfDate", "SWHardeningNeeded"). + pub fn allow_status(self, status: &str) -> Result { + let s = js_parse_tcb_status(status)?; + Ok(Self { + inner: self.inner.allow_status(s), + }) + } + + /// Reject a specific advisory ID (e.g. "INTEL-SA-00334"). + pub fn reject_advisory(self, id: &str) -> Self { + Self { + inner: self.inner.reject_advisory(id), + } + } + + /// Reject multiple advisory IDs at once. + pub fn reject_advisories(self, ids: Vec) -> Self { + Self { + inner: self.inner.reject_advisories(&ids), + } + } + + /// Set collateral grace period in seconds. 
+ pub fn collateral_grace_period(self, secs: u64) -> Self { + Self { + inner: self + .inner + .collateral_grace_period(Duration::from_secs(secs)), + } + } + + /// Set platform grace period in seconds. + pub fn platform_grace_period(self, secs: u64) -> Self { + Self { + inner: self.inner.platform_grace_period(Duration::from_secs(secs)), + } + } + + /// Set QE grace period in seconds. + pub fn qe_grace_period(self, secs: u64) -> Self { + Self { + inner: self.inner.qe_grace_period(Duration::from_secs(secs)), + } + } + + /// Set minimum TCB evaluation data number. + pub fn min_tcb_eval_data_number(self, min: u32) -> Self { + Self { + inner: self.inner.min_tcb_eval_data_number(min), + } + } + + /// Set whether dynamic platforms are allowed. + pub fn allow_dynamic_platform(self, allow: bool) -> Self { + Self { + inner: self.inner.allow_dynamic_platform(allow), + } + } + + /// Set whether cached keys are allowed. + pub fn allow_cached_keys(self, allow: bool) -> Self { + Self { + inner: self.inner.allow_cached_keys(allow), + } + } + + /// Set whether SMT (hyperthreading) is allowed. + pub fn allow_smt(self, allow: bool) -> Self { + Self { + inner: self.inner.allow_smt(allow), + } + } + + /// Set accepted SGX types (e.g. [0, 1, 2]). + pub fn accepted_sgx_types(self, types: Vec) -> Self { + Self { + inner: self.inner.accepted_sgx_types(&types), + } + } +} + +/// Intel QAL-compatible Rego policy for JS/WASM. 
+#[cfg(all(feature = "js", feature = "rego"))] +#[wasm_bindgen(js_name = "RegoPolicy")] +pub struct JsRegoPolicy { + inner: crate::policy::RegoPolicy, +} + +#[cfg(all(feature = "js", feature = "rego"))] +#[wasm_bindgen(js_class = "RegoPolicy")] +impl JsRegoPolicy { + #[wasm_bindgen(constructor)] + pub fn new(policy_json: &str) -> Result { + let inner = crate::policy::RegoPolicy::new(policy_json) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + Ok(Self { inner }) + } + + pub fn with_rego(policy_json: &str, rego_source: &str) -> Result { + let inner = crate::policy::RegoPolicy::with_rego(policy_json, rego_source) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + Ok(JsRegoPolicy { inner }) + } +} + +/// Multi-measurement Intel QAL-compatible Rego policy set for JS/WASM. +#[cfg(all(feature = "js", feature = "rego"))] +#[wasm_bindgen(js_name = "RegoPolicySet")] +pub struct JsRegoPolicySet { + inner: crate::policy::RegoPolicySet, +} + +#[cfg(all(feature = "js", feature = "rego"))] +#[wasm_bindgen(js_class = "RegoPolicySet")] +impl JsRegoPolicySet { + #[wasm_bindgen(constructor)] + pub fn new(policy_jsons: Vec) -> Result { + let policy_refs: Vec<&str> = policy_jsons.iter().map(String::as_str).collect(); + let inner = crate::policy::RegoPolicySet::new(&policy_refs) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + Ok(Self { inner }) + } + + pub fn with_rego( + policy_jsons: Vec, + rego_source: &str, + ) -> Result { + let policy_refs: Vec<&str> = policy_jsons.iter().map(String::as_str).collect(); + let inner = crate::policy::RegoPolicySet::with_rego(&policy_refs, rego_source) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + Ok(JsRegoPolicySet { inner }) + } +} + +/// Result of cryptographic quote verification (phase 1) for JS/WASM. +/// +/// Use `validate(policy)` to apply a [`JsSimplePolicy`] and get a `VerifiedReport`. +/// Use `into_report_unchecked()` to skip policy validation. 
+#[cfg(feature = "js")] +#[wasm_bindgen(js_name = "QuoteVerificationResult")] +pub struct JsQuoteVerificationResult { + inner: Option, +} + +#[cfg(feature = "js")] +#[wasm_bindgen(js_class = "QuoteVerificationResult")] +impl JsQuoteVerificationResult { + /// Validate against a policy, returning a VerifiedReport. Consumes the result. + pub fn validate(&mut self, policy: &JsSimplePolicy) -> Result { + let result = self + .inner + .take() + .ok_or_else(|| JsValue::from_str("verification result already consumed"))?; + let report = result + .validate(&policy.inner) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + serde_wasm_bindgen::to_value(&report) + .map_err(|_| JsValue::from_str("Failed to encode verified_report")) + } + + /// Validate against a Rego policy, returning a VerifiedReport. Consumes the result. + #[cfg(feature = "rego")] + pub fn validate_rego(&mut self, policy: &JsRegoPolicy) -> Result { + let result = self + .inner + .take() + .ok_or_else(|| JsValue::from_str("verification result already consumed"))?; + let report = result + .validate(&policy.inner) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + serde_wasm_bindgen::to_value(&report) + .map_err(|_| JsValue::from_str("Failed to encode verified_report")) + } + + /// Validate against a Rego policy set, returning a VerifiedReport. Consumes the result. + #[cfg(feature = "rego")] + pub fn validate_rego_set(&mut self, policy: &JsRegoPolicySet) -> Result { + let result = self + .inner + .take() + .ok_or_else(|| JsValue::from_str("verification result already consumed"))?; + let report = result + .validate(&policy.inner) + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + serde_wasm_bindgen::to_value(&report) + .map_err(|_| JsValue::from_str("Failed to encode verified_report")) + } + + /// Get VerifiedReport without policy validation. Consumes the result. 
+ pub fn into_report_unchecked(&mut self) -> Result { + let result = self + .inner + .take() + .ok_or_else(|| JsValue::from_str("verification result already consumed"))?; + serde_wasm_bindgen::to_value(&result.into_report_unchecked()) + .map_err(|_| JsValue::from_str("Failed to encode verified_report")) + } } -#[cfg(all(feature = "js", feature = "_anycrypto"))] -#[wasm_bindgen] -pub fn js_verify_with_root_ca( - raw_quote: JsValue, - quote_collateral: JsValue, - root_ca_der: JsValue, - now: u64, -) -> Result { - let raw_quote: Vec = serde_wasm_bindgen::from_value(raw_quote) - .map_err(|_| JsValue::from_str("Failed to decode raw_quote"))?; - let quote_collateral = serde_wasm_bindgen::from_value::(quote_collateral)?; - let root_ca_der: Vec = serde_wasm_bindgen::from_value(root_ca_der) - .map_err(|_| JsValue::from_str("Failed to decode root_ca_der"))?; - - let verifier = QuoteVerifier::new(root_ca_der, default_crypto::backend()); - let verified_report = verifier - .verify(&raw_quote, "e_collateral, now) - .map_err(|e| { - let error_msg = format_error_chain(&e); - serde_wasm_bindgen::to_value(&error_msg) - .unwrap_or_else(|_| JsValue::from_str("Failed to encode Error")) - })?; - - serde_wasm_bindgen::to_value(&verified_report) - .map_err(|_| JsValue::from_str("Failed to encode verified_report")) +/// Quote verifier for JS/WASM. 
+/// +/// ```js +/// const verifier = new QuoteVerifier(); // Intel production root CA +/// const verifier = new QuoteVerifier(rootCaDer); // custom root CA +/// const result = verifier.verify(quote, collateral, now); +/// ``` +#[cfg(feature = "js")] +#[wasm_bindgen(js_name = "QuoteVerifier")] +pub struct JsQuoteVerifier { + inner: QuoteVerifier, } #[cfg(feature = "js")] -#[wasm_bindgen] -pub async fn js_get_collateral(pccs_url: JsValue, raw_quote: JsValue) -> Result { - let pccs_url: String = serde_wasm_bindgen::from_value(pccs_url) - .map_err(|_| JsValue::from_str("Failed to decode pccs_url"))?; - let raw_quote: Vec = serde_wasm_bindgen::from_value(raw_quote) - .map_err(|_| JsValue::from_str("Failed to decode raw_quote"))?; - - let collateral: QuoteCollateralV3 = crate::collateral::get_collateral(&pccs_url, &raw_quote) - .await - .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; - serde_wasm_bindgen::to_value(&collateral) - .map_err(|_| JsValue::from_str("Failed to encode collateral")) +#[wasm_bindgen(js_class = "QuoteVerifier")] +impl JsQuoteVerifier { + /// Create a verifier. No argument = Intel production root CA; pass `rootCaDer` for custom. + #[wasm_bindgen(constructor)] + pub fn new(root_ca_der: Option>) -> Self { + let inner = match root_ca_der { + Some(der) => QuoteVerifier::new(der, default_crypto::backend()), + None => QuoteVerifier::new_prod(default_crypto::backend()), + }; + Self { inner } + } + + /// Perform cryptographic verification, returning a [`QuoteVerificationResult`]. 
+ pub fn verify( + &self, + raw_quote: JsValue, + quote_collateral: JsValue, + now: u64, + ) -> Result { + let raw_quote: Vec = serde_wasm_bindgen::from_value(raw_quote) + .map_err(|_| JsValue::from_str("Failed to decode raw_quote"))?; + let quote_collateral = + serde_wasm_bindgen::from_value::(quote_collateral)?; + + let result = self + .inner + .verify(&raw_quote, quote_collateral, now) + .map_err(|e| { + let error_msg = format_error_chain(&e); + serde_wasm_bindgen::to_value(&error_msg) + .unwrap_or_else(|_| JsValue::from_str("Failed to encode Error")) + })?; + + Ok(JsQuoteVerificationResult { + inner: Some(result), + }) + } + + /// Fetch collateral from a PCCS server. + pub async fn get_collateral(pccs_url: &str, raw_quote: JsValue) -> Result { + let raw_quote: Vec = serde_wasm_bindgen::from_value(raw_quote) + .map_err(|_| JsValue::from_str("Failed to decode raw_quote"))?; + + let collateral: QuoteCollateralV3 = crate::collateral::get_collateral(pccs_url, &raw_quote) + .await + .map_err(|e| JsValue::from_str(&format_error_chain(&e)))?; + serde_wasm_bindgen::to_value(&collateral) + .map_err(|_| JsValue::from_str("Failed to encode collateral")) + } } // ============================================================================= @@ -255,16 +637,14 @@ fn verify_tcb_info_signature( .context("Failed to decode TcbInfo")?; // Check validity window - let issue_date = chrono::DateTime::parse_from_rfc3339(&tcb_info.issue_date) - .ok() + let issue_date = parse_rfc3339_unix_secs(&tcb_info.issue_date) .context("Failed to parse TCB Info issue date")?; - let next_update = chrono::DateTime::parse_from_rfc3339(&tcb_info.next_update) - .ok() + let next_update = parse_rfc3339_unix_secs(&tcb_info.next_update) .context("Failed to parse TCB Info next update")?; - if now.as_secs() < issue_date.timestamp() as u64 { + if now.as_secs() < issue_date { bail!("TCBInfo issue date is in the future"); } - if now.as_secs() > next_update.timestamp() as u64 { + if now.as_secs() > next_update { 
bail!("TCBInfo expired"); } @@ -310,16 +690,14 @@ fn verify_qe_identity_signature( .context("Failed to decode QeIdentity")?; // Check validity window - let issue_date = chrono::DateTime::parse_from_rfc3339(&qe_identity.issue_date) - .ok() + let issue_date = parse_rfc3339_unix_secs(&qe_identity.issue_date) .context("Failed to parse QE Identity issue date")?; - let next_update = chrono::DateTime::parse_from_rfc3339(&qe_identity.next_update) - .ok() + let next_update = parse_rfc3339_unix_secs(&qe_identity.next_update) .context("Failed to parse QE Identity next update")?; - if now.as_secs() < issue_date.timestamp() as u64 { + if now.as_secs() < issue_date { bail!("QE Identity issue date is in the future"); } - if now.as_secs() > next_update.timestamp() as u64 { + if now.as_secs() > next_update { bail!("QE Identity expired"); } @@ -387,22 +765,57 @@ fn verify_pck_cert_chain( // Extract Intel extension data from PCK cert (parsed once) let pck_ext = intel::parse_pck_extension(pck_leaf)?; + // Preserve pce_id as the raw value from the PCK cert SGX extension. 
+ let pce_id = pck_ext.pce_id.clone(); + + // Convert platform_instance_id to fixed-size array + let platform_instance_id = pck_ext.platform_instance_id.as_ref().and_then(|v| { + let arr: [u8; 16] = v.as_slice().try_into().ok()?; + Some(arr) + }); + Ok(PckCertChainResult { + pck_cert_chain_der: certification_certs + .iter() + .map(|cert| cert.as_ref().to_vec()) + .collect(), pck_leaf_der: pck_leaf.as_ref().to_vec(), ppid: pck_ext.ppid, cpu_svn: pck_ext.cpu_svn, pce_svn: pck_ext.pce_svn, fmspc: pck_ext.fmspc, + pce_id, + sgx_type: pck_ext.sgx_type as u8, + platform_instance_id, + dynamic_platform: pck_ext.dynamic_platform.into(), + cached_keys: pck_ext.cached_keys.into(), + smt_enabled: pck_ext.smt_enabled.into(), }) } /// Result from PCK certificate chain verification +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)] +#[cfg_attr(feature = "borsh", derive(BorshSerialize, BorshDeserialize))] +#[cfg_attr(feature = "borsh_schema", derive(BorshSchema))] struct PckCertChainResult { + #[serde(with = "crate::utils::serde_vec_bytes")] + pck_cert_chain_der: Vec>, + #[serde(with = "serde_bytes")] pck_leaf_der: Vec, + #[serde(with = "serde_bytes")] ppid: Vec, + #[serde(with = "serde_bytes")] cpu_svn: [u8; 16], pce_svn: u16, + #[serde(with = "serde_bytes")] fmspc: [u8; 6], + #[serde(with = "serde_bytes")] + pce_id: Vec, + sgx_type: u8, + platform_instance_id: Option<[u8; 16]>, + dynamic_platform: PckCertFlag, + cached_keys: PckCertFlag, + smt_enabled: PckCertFlag, } // ============================================================================= @@ -500,7 +913,7 @@ fn verify_isv_report_signature( // Step 8: Match Platform TCB (PCK Cert's CPU_SVN/PCE_SVN/FMSPC vs TCB Info) // ============================================================================= -/// Match platform TCB level and return status with advisory IDs +/// Match platform TCB level and return the matched TcbLevel fn match_platform_tcb( tcb_info: &TcbInfo, quote: &Quote, @@ 
-508,7 +921,7 @@ fn match_platform_tcb( cpu_svn: &[u8], pce_svn: u16, fmspc: &[u8], -) -> Result { +) -> Result { // Verify FMSPC matches let tcb_fmspc = hex::decode(&tcb_info.fmspc) .ok() @@ -569,11 +982,8 @@ fn match_platform_tcb( } } - // Found matching level - return Ok(TcbStatusWithAdvisory::new( - tcb_level.tcb_status, - tcb_level.advisory_ids.clone(), - )); + // Found matching level - return the full TcbLevel + return Ok(tcb_level.clone()); } bail!("No matching TCB level found"); @@ -583,7 +993,8 @@ fn match_platform_tcb( // Main verification flow following the trust chain // ============================================================================= -/// Internal implementation that uses QuoteVerifier +/// Cryptographic verification of a quote. Returns [`SupplementalData`] without +/// applying any policy — the caller decides acceptance via [`SupplementalData::validate()`]. /// /// Trust chain verification order: /// 1. Verify TCB Info signature (Intel Root -> TCB Signing Cert -> TCB Info JSON) @@ -598,14 +1009,14 @@ fn match_platform_tcb( /// 10. 
Merge TCB statuses fn verify_impl( raw_quote: &[u8], - collateral: &QuoteCollateralV3, + collateral: QuoteCollateralV3, now_secs: u64, root_ca_der: &[u8], backend: &CryptoBackend, #[cfg(feature = "danger-allow-tcb-override")] override_tcb_info: Option< impl FnOnce(TcbInfo) -> TcbInfo, >, -) -> Result { +) -> Result { // Setup trust anchor and time let root_ca = CertificateDer::from_slice(root_ca_der); let trust_anchor = @@ -645,7 +1056,7 @@ fn verify_impl( // Step 1: Verify TCB Info signature let tcb_info = - verify_tcb_info_signature(collateral, now, &crls, trust_anchor.clone(), backend)?; + verify_tcb_info_signature(&collateral, now, &crls, trust_anchor.clone(), backend)?; #[cfg(feature = "danger-allow-tcb-override")] let tcb_info = match override_tcb_info { @@ -655,7 +1066,7 @@ fn verify_impl( // Step 2: Verify QE Identity signature let qe_identity = - verify_qe_identity_signature(collateral, now, &crls, trust_anchor.clone(), backend)?; + verify_qe_identity_signature(&collateral, now, &crls, trust_anchor.clone(), backend)?; let (expected_qe_id, allowed_qe_versions): (&str, &[u8]) = match tee_type { TeeType::Sgx => ("QE", &[2]), TeeType::Tdx => ("TD_QE", &[2, 3]), @@ -672,7 +1083,7 @@ fn verify_impl( // Step 3: Verify PCK certificate chain let pck_result = verify_pck_cert_chain( - collateral, + &collateral, &auth_data.certification_data, now, &crls, @@ -686,14 +1097,14 @@ fn verify_impl( // Step 5: Verify QE Report content (hash check) verify_qe_report_data(&qe_report, &auth_data, backend)?; - // Step 6: Verify QE Report policy - let qe_status = verify_qe_identity_policy(&qe_report, &qe_identity)?; + // Step 6: Verify QE Report policy (returns matched QeTcbLevel) + let qe_tcb_level = verify_qe_identity_policy(&qe_report, &qe_identity)?; // Step 7: Verify ISV Report signature verify_isv_report_signature(raw_quote, "e, &auth_data, backend)?; - // Step 8: Match Platform TCB - let platform_status = match_platform_tcb( + // Step 8: Match Platform TCB (returns matched 
TcbLevel) + let platform_tcb_level = match_platform_tcb( &tcb_info, "e, tee_type, @@ -702,22 +1113,208 @@ fn verify_impl( &pck_result.fmspc, )?; - // Step 9 & 10: QE TCB matching is done in verify_qe_identity_policy, merge statuses - let final_status = platform_status.clone().merge(&qe_status); - if !final_status.status.is_valid() { - bail!("TCB status is invalid: {:?}", final_status.status); + // Step 9 & 10: Merge statuses (take worst) + let platform_status = TcbStatusWithAdvisory::new( + platform_tcb_level.tcb_status, + platform_tcb_level.advisory_ids.clone(), + ); + let qe_status = + TcbStatusWithAdvisory::new(qe_tcb_level.tcb_status, qe_tcb_level.advisory_ids.clone()); + let final_status = platform_status.merge(&qe_status); + + // Revoked means the platform's keys are compromised — reject unconditionally, + // regardless of policy. This is a security invariant, not a policy decision. + if final_status.status == TcbStatus::Revoked { + bail!("TCB status is invalid: Revoked"); } + let root_key_id = { + let root_cert: x509_cert::Certificate = + der::Decode::from_der(root_ca_der).context("Failed to parse root CA certificate")?; + let raw_key = root_cert + .tbs_certificate + .subject_public_key_info + .subject_public_key + .raw_bytes(); + (backend.sha384)(raw_key) + }; + // Validate report attributes (debug mode check, etc.) 
validate_attrs("e.report)?; - Ok(VerifiedReport { - status: final_status.status.to_string(), - advisory_ids: final_status.advisory_ids, + Ok(QuoteVerificationResult { report: quote.report, - ppid: pck_result.ppid, - qe_status, - platform_status, + collateral, + pck_cert_chain_der: pck_result.pck_cert_chain_der.clone(), + tee_type: quote.header.tee_type, + tcb_status: final_status.status, + advisory_ids: final_status.advisory_ids, + platform_tcb_level, + qe_tcb_level, + pck_ext: pck_result, + qe_report, + tcb_eval_data_number: tcb_info + .tcb_evaluation_data_number + .min(qe_identity.tcb_evaluation_data_number), + qe_tcb_eval_data_number: qe_identity.tcb_evaluation_data_number, + root_key_id, + }) +} + +/// Collateral time window dates (8 sources + QE Identity subset). +struct CollateralDates { + earliest_issue: u64, + latest_issue: u64, + earliest_expiration: u64, + /// QE Identity-specific dates (sources \[5\] + \[7\] only). + qe_iden_earliest_issue: u64, + qe_iden_latest_issue: u64, + qe_iden_earliest_expiration: u64, +} + +/// Compute the collateral time window: earliest issue, latest issue, earliest expiration. +/// +/// Matches Intel QVL's `qve_get_collateral_dates()` which considers **8 date sources**: +/// +/// 1. Root CA CRL thisUpdate/nextUpdate +/// 2. PCK CRL thisUpdate/nextUpdate +/// 3. PCK CRL issuer certificate chain notBefore/notAfter +/// 4. PCK certificate chain notBefore/notAfter +/// 5. TCBInfo issuer certificate chain notBefore/notAfter +/// 6. QEIdentity issuer certificate chain notBefore/notAfter +/// 7. TCBInfo JSON issueDate/nextUpdate +/// 8. 
QEIdentity JSON issueDate/nextUpdate +fn compute_collateral_time_window( + collateral: &QuoteCollateralV3, + pck_cert_chain: &[CertificateDer<'_>], + tcb_info: &TcbInfo, + qe_identity: &QeIdentity, +) -> Result { + fn parse_crl_dates(crl_der: &[u8]) -> Result<(u64, Option)> { + use der::Decode as _; + let crl = x509_cert::crl::CertificateList::from_der(crl_der) + .context("Failed to parse CRL for time window")?; + let this_update = crl.tbs_cert_list.this_update.to_unix_duration().as_secs(); + let next_update = crl + .tbs_cert_list + .next_update + .map(|t| t.to_unix_duration().as_secs()); + Ok((this_update, next_update)) + } + + /// Extract notBefore/notAfter from a PEM certificate chain and fold into min/max accumulators. + fn fold_cert_chain_dates( + pem_chain: &[u8], + earliest_issue: &mut u64, + latest_issue: &mut u64, + earliest_expiration: &mut u64, + ) -> Result<()> { + let certs = extract_certs(pem_chain)?; + fold_der_cert_dates(&certs, earliest_issue, latest_issue, earliest_expiration) + } + + fn fold_der_cert_dates( + certs: &[CertificateDer<'_>], + earliest_issue: &mut u64, + latest_issue: &mut u64, + earliest_expiration: &mut u64, + ) -> Result<()> { + use der::Decode as _; + for cert_der in certs { + let cert = x509_cert::Certificate::from_der(cert_der) + .context("Failed to parse certificate for time window")?; + let not_before = cert + .tbs_certificate + .validity + .not_before + .to_unix_duration() + .as_secs(); + let not_after = cert + .tbs_certificate + .validity + .not_after + .to_unix_duration() + .as_secs(); + *earliest_issue = (*earliest_issue).min(not_before); + *latest_issue = (*latest_issue).max(not_before); + *earliest_expiration = (*earliest_expiration).min(not_after); + } + Ok(()) + } + + // TCBInfo dates (already parsed upstream) + let tcb_issue = parse_rfc3339_unix_secs(&tcb_info.issue_date).context("TCBInfo issueDate")?; + let tcb_next = parse_rfc3339_unix_secs(&tcb_info.next_update).context("TCBInfo nextUpdate")?; + + // QEIdentity 
dates (already parsed upstream) + let qe_issue = + parse_rfc3339_unix_secs(&qe_identity.issue_date).context("QEIdentity issueDate")?; + let qe_next = + parse_rfc3339_unix_secs(&qe_identity.next_update).context("QEIdentity nextUpdate")?; + + let mut earliest_issue = tcb_issue.min(qe_issue); + let mut latest_issue = tcb_issue.max(qe_issue); + let mut earliest_expiration = tcb_next.min(qe_next); + + // Include CRL dates (sources 1 & 2) + for crl_der in [&collateral.root_ca_crl[..], &collateral.pck_crl[..]] { + let (this_update, next_update) = parse_crl_dates(crl_der)?; + earliest_issue = earliest_issue.min(this_update); + latest_issue = latest_issue.max(this_update); + if let Some(next) = next_update { + earliest_expiration = earliest_expiration.min(next); + } + } + + // Include certificate chain dates (sources 3-6) + // PCK CRL issuer chain (same PEM as pck_crl_issuer_chain) + fold_cert_chain_dates( + collateral.pck_crl_issuer_chain.as_bytes(), + &mut earliest_issue, + &mut latest_issue, + &mut earliest_expiration, + )?; + // PCK certificate chain + fold_der_cert_dates( + pck_cert_chain, + &mut earliest_issue, + &mut latest_issue, + &mut earliest_expiration, + )?; + // TCBInfo issuer chain + fold_cert_chain_dates( + collateral.tcb_info_issuer_chain.as_bytes(), + &mut earliest_issue, + &mut latest_issue, + &mut earliest_expiration, + )?; + // QEIdentity issuer chain (source [5]) — also track QE-specific dates + let mut qe_chain_earliest_issue = u64::MAX; + let mut qe_chain_latest_issue = 0u64; + let mut qe_chain_earliest_expiration = u64::MAX; + fold_cert_chain_dates( + collateral.qe_identity_issuer_chain.as_bytes(), + &mut qe_chain_earliest_issue, + &mut qe_chain_latest_issue, + &mut qe_chain_earliest_expiration, + )?; + // Fold into global window + earliest_issue = earliest_issue.min(qe_chain_earliest_issue); + latest_issue = latest_issue.max(qe_chain_latest_issue); + earliest_expiration = earliest_expiration.min(qe_chain_earliest_expiration); + + // QE 
Identity-specific window: min/max of source [5] (issuer chain) + source [7] (JSON) + let qe_iden_earliest_issue = qe_chain_earliest_issue.min(qe_issue); + let qe_iden_latest_issue = qe_chain_latest_issue.max(qe_issue); + let qe_iden_earliest_expiration = qe_chain_earliest_expiration.min(qe_next); + + Ok(CollateralDates { + earliest_issue, + latest_issue, + earliest_expiration, + qe_iden_earliest_issue, + qe_iden_latest_issue, + qe_iden_earliest_expiration, }) } @@ -774,36 +1371,21 @@ pub mod ring { out } + fn ring_sha384(data: &[u8]) -> [u8; 48] { + let digest = ::ring::digest::digest(&::ring::digest::SHA384, data); + let mut out = [0u8; 48]; + out.copy_from_slice(digest.as_ref()); + out + } + /// Returns a [`CryptoBackend`] backed by ring. pub fn backend() -> CryptoBackend { CryptoBackend { sig_algo: webpki::ring::ECDSA_P256_SHA256, sha256: ring_sha256, + sha384: ring_sha384, } } - - /// Verify a quote using Intel's trusted root CA and ring backend. - pub fn verify( - raw_quote: &[u8], - collateral: &QuoteCollateralV3, - now_secs: u64, - ) -> Result { - QuoteVerifier::new(TRUSTED_ROOT_CA_DER.to_vec(), backend()) - .verify(raw_quote, collateral, now_secs) - } - - /// Verify a quote using Intel's trusted root CA and ring backend, - /// passing a function to override TCB info after the signature check - #[cfg(feature = "danger-allow-tcb-override")] - pub fn dangerous_verify_with_tcb_override( - raw_quote: &[u8], - collateral: &QuoteCollateralV3, - now_secs: u64, - override_tcb_info: impl FnOnce(TcbInfo) -> TcbInfo, - ) -> Result { - QuoteVerifier::new(TRUSTED_ROOT_CA_DER.to_vec(), backend()) - .dangerous_verify_with_tcb_override(raw_quote, collateral, now_secs, override_tcb_info) - } } /// RustCrypto backend module. @@ -818,76 +1400,21 @@ pub mod rustcrypto { sha2::Sha256::digest(data).into() } + fn rustcrypto_sha384(data: &[u8]) -> [u8; 48] { + use sha2::Digest; + sha2::Sha384::digest(data).into() + } + /// Returns a [`CryptoBackend`] backed by RustCrypto. 
pub fn backend() -> CryptoBackend { CryptoBackend { sig_algo: webpki::rustcrypto::ECDSA_P256_SHA256, sha256: rustcrypto_sha256, + sha384: rustcrypto_sha384, } } - - /// Verify a quote using Intel's trusted root CA and RustCrypto backend. - pub fn verify( - raw_quote: &[u8], - collateral: &QuoteCollateralV3, - now_secs: u64, - ) -> Result { - QuoteVerifier::new(TRUSTED_ROOT_CA_DER.to_vec(), backend()) - .verify(raw_quote, collateral, now_secs) - } - - /// Verify a quote using Intel's trusted root CA and RustCrypto backend, - /// passing a function to override TCB info after the signature check - #[cfg(feature = "danger-allow-tcb-override")] - pub fn dangerous_verify_with_tcb_override( - raw_quote: &[u8], - collateral: &QuoteCollateralV3, - now_secs: u64, - override_tcb_info: impl FnOnce(TcbInfo) -> TcbInfo, - ) -> Result { - QuoteVerifier::new(TRUSTED_ROOT_CA_DER.to_vec(), backend()) - .dangerous_verify_with_tcb_override(raw_quote, collateral, now_secs, override_tcb_info) - } } -/// Verify a quote using Intel's trusted root CA (ring backend). -/// -/// This is a backwards-compatible convenience function that uses the ring backend. -/// For rustcrypto, use [`rustcrypto::verify()`]. -/// -/// # Arguments -/// -/// * `raw_quote` - The raw quote to verify. Supported SGX and TDX quotes. -/// * `quote_collateral` - The quote collateral to verify. Can be obtained from PCCS by `get_collateral`. -/// * `now` - The current time in seconds since the Unix epoch -/// -/// # Returns -/// -/// * `Ok(VerifiedReport)` - The verified report -/// * `Err(Error)` - The error -#[cfg(feature = "_anycrypto")] -pub use self::default_crypto::verify; - -/// Verify a quote using Intel's trusted root CA (ring backend), passing a function which modifies -/// TCB info after the signature check. -/// -/// This is a backwards-compatible convenience function that uses the ring backend. -/// For rustcrypto, use [`rustcrypto::dangerous_verify_with_tcb_override()`]. 
-/// -/// # Arguments -/// -/// * `raw_quote` - The raw quote to verify. Supported SGX and TDX quotes. -/// * `quote_collateral` - The quote collateral to verify. Can be obtained from PCCS by `get_collateral`. -/// * `now` - The current time in seconds since the Unix epoch -/// * `override_tcb_info` - a function which modifies TCB info after the signature check -/// -/// # Returns -/// -/// * `Ok(VerifiedReport)` - The verified report -/// * `Err(Error)` - The error -#[cfg(all(feature = "_anycrypto", feature = "danger-allow-tcb-override"))] -pub use self::default_crypto::dangerous_verify_with_tcb_override; - // ============================================================================= // Step 6 & 9: Verify QE Report policy and match QE TCB // ============================================================================= @@ -901,11 +1428,11 @@ pub use self::default_crypto::dangerous_verify_with_tcb_override; /// - ATTRIBUTES match after applying the mask /// - ISVSVN meets minimum requirement from QE Identity TCB levels (Step 9) /// -/// Returns the QE TCB status and advisory IDs based on the QE's ISVSVN. +/// Returns the matched QeTcbLevel based on the QE's ISVSVN. fn verify_qe_identity_policy( qe_report: &EnclaveReport, qe_identity: &QeIdentity, -) -> Result { +) -> Result { // Verify MRSIGNER if qe_report.mr_signer != qe_identity.mrsigner { bail!( @@ -968,17 +1495,14 @@ fn verify_qe_identity_policy( /// Match QE ISVSVN against QE Identity TCB levels /// /// TCB levels are expected to be sorted from highest to lowest ISVSVN. -/// Returns the status and advisory IDs for the matching level. +/// Returns the matched QeTcbLevel. 
fn match_qe_tcb_level( isv_svn: u16, tcb_levels: &[crate::qe_identity::QeTcbLevel], -) -> Result { +) -> Result { for tcb_level in tcb_levels { if isv_svn >= tcb_level.tcb.isvsvn { - return Ok(TcbStatusWithAdvisory::new( - tcb_level.tcb_status, - tcb_level.advisory_ids.clone(), - )); + return Ok(tcb_level.clone()); } } @@ -1181,9 +1705,9 @@ mod tests { let result = verify_qe_identity_policy(&qe_report, &qe_identity); assert!(result.is_ok()); - let status = result.unwrap(); - assert_eq!(status.status, UpToDate); - assert!(status.advisory_ids.is_empty()); + let tcb_level = result.unwrap(); + assert_eq!(tcb_level.tcb_status, UpToDate); + assert!(tcb_level.advisory_ids.is_empty()); } #[test] @@ -1194,9 +1718,9 @@ mod tests { let result = verify_qe_identity_policy(&qe_report, &qe_identity); assert!(result.is_ok()); - let status = result.unwrap(); - assert_eq!(status.status, OutOfDate); - assert_eq!(status.advisory_ids, vec!["INTEL-SA-00615"]); + let tcb_level = result.unwrap(); + assert_eq!(tcb_level.tcb_status, OutOfDate); + assert_eq!(tcb_level.advisory_ids, vec!["INTEL-SA-00615"]); } #[test] @@ -1207,8 +1731,8 @@ mod tests { let result = verify_qe_identity_policy(&qe_report, &qe_identity); assert!(result.is_ok()); - let status = result.unwrap(); - assert_eq!(status.status, UpToDate); // Matches first level (isvsvn >= 8) + let tcb_level = result.unwrap(); + assert_eq!(tcb_level.tcb_status, UpToDate); // Matches first level (isvsvn >= 8) } #[test] @@ -1235,9 +1759,9 @@ mod tests { let result = verify_qe_identity_policy(&qe_report, &qe_identity); assert!(result.is_ok()); - let status = result.unwrap(); + let tcb_level = result.unwrap(); // Should match level with isvsvn=6 (7 >= 6) - assert_eq!(status.status, OutOfDate); - assert_eq!(status.advisory_ids, vec!["INTEL-SA-00615"]); + assert_eq!(tcb_level.tcb_status, OutOfDate); + assert_eq!(tcb_level.advisory_ids, vec!["INTEL-SA-00615"]); } } diff --git a/tests/esbuild/src/main.ts b/tests/esbuild/src/main.ts index 
5218350..a13dc24 100644 --- a/tests/esbuild/src/main.ts +++ b/tests/esbuild/src/main.ts @@ -1,4 +1,4 @@ -import init, { js_verify, js_get_collateral } from "@phala/dcap-qvl-web"; +import init, { QuoteVerifier, SimplePolicy } from "@phala/dcap-qvl-web"; import wasm from "@phala/dcap-qvl-web/dcap-qvl-web_bg.wasm"; const PCCS_URL = "https://pccs.phala.network/tdx/certification/v4"; @@ -14,12 +14,14 @@ async function fetchQuoteAsUint8Array(url: string): Promise { init(wasm).then(() => { console.log("Phala DCAP QVL initialized!"); - // You can now use js_verify, js_get_collateral, etc. fetchQuoteAsUint8Array("/sample/tdx_quote").then(async (rawQuote) => { - const quoteCollateral = await js_get_collateral(PCCS_URL, rawQuote); + const quoteCollateral = await QuoteVerifier.get_collateral(PCCS_URL, rawQuote); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify(rawQuote, quoteCollateral, now); - console.log("Verification Result:", result); + const verifier = new QuoteVerifier(); + const result = verifier.verify(rawQuote, quoteCollateral, now); + const policy = new SimplePolicy(now); + const report = result.validate(policy); + console.log("Verification Result:", report); }); }).catch((error: unknown) => { console.error("Error:", error); diff --git a/tests/js/README.md b/tests/js/README.md index 640118b..80bdcb9 100644 --- a/tests/js/README.md +++ b/tests/js/README.md @@ -29,6 +29,7 @@ See [TEST_WEB.md](TEST_WEB.md) for detailed web testing documentation. 
```bash cd tests/js node verify_quote_node.js +node verify_quote_rego_node.js ``` ### Verify Quote in Web Browser diff --git a/tests/js/get_collateral_node.js b/tests/js/get_collateral_node.js index 394e156..41ec405 100644 --- a/tests/js/get_collateral_node.js +++ b/tests/js/get_collateral_node.js @@ -1,6 +1,6 @@ const fs = require("fs"); const path = require("path"); -const { js_get_collateral } = require("../../pkg/node/dcap-qvl-node"); +const { QuoteVerifier } = require("../../pkg/node/dcap-qvl-node"); // Function to read a file as a Uint8Array function readFileAsUint8Array(filePath) { @@ -16,7 +16,7 @@ const rawQuote = readFileAsUint8Array(rawQuotePath); try { // Call the js_get_collateral function for TDX quote let pccs_url = "https://pccs.phala.network/tdx/certification/v4"; - const result = await js_get_collateral(pccs_url, rawQuote); + const result = await QuoteVerifier.get_collateral(pccs_url, rawQuote); console.log("Collateral Result:", result); } catch (error) { console.error("Get collateral failed:", error); diff --git a/tests/js/get_collateral_web.js b/tests/js/get_collateral_web.js index 3a1d52b..8f865fb 100644 --- a/tests/js/get_collateral_web.js +++ b/tests/js/get_collateral_web.js @@ -1,4 +1,4 @@ -import init, { js_get_collateral } from "/pkg/web/dcap-qvl-web.js"; +import init, { QuoteVerifier } from "/pkg/web/dcap-qvl-web.js"; // Function to fetch a file as a Uint8Array async function fetchFileAsUint8Array(url) { @@ -18,9 +18,8 @@ async function getCollateral() { const rawQuote = await fetchFileAsUint8Array(rawQuoteUrl); - // Call the js_get_collateral function for TDX quote let pccs_url = "https://pccs.phala.network/tdx/certification/v4"; - const result = await js_get_collateral(pccs_url, rawQuote); + const result = await QuoteVerifier.get_collateral(pccs_url, rawQuote); console.log("Collateral Result:", result); } catch (error) { console.error("Get collateral failed:", error); diff --git a/tests/js/verify_quote_node.js 
b/tests/js/verify_quote_node.js index bdd9606..65811d2 100644 --- a/tests/js/verify_quote_node.js +++ b/tests/js/verify_quote_node.js @@ -1,9 +1,6 @@ const fs = require("fs"); const path = require("path"); -const { - js_verify, - js_get_collateral, -} = require("../../pkg/node/dcap-qvl-node"); +const { QuoteVerifier, SimplePolicy } = require("../../pkg/node/dcap-qvl-node"); // Function to read a file as a Uint8Array function readFileAsUint8Array(filePath) { @@ -22,11 +19,13 @@ const now = BigInt(Math.floor(Date.now() / 1000)); (async () => { try { - // Call the js_verify function let pccs_url = "https://pccs.phala.network/tdx/certification/v4"; - const quoteCollateral = await js_get_collateral(pccs_url, rawQuote); - const result = js_verify(rawQuote, quoteCollateral, now); - console.log("Verification Result:", result); + const quoteCollateral = await QuoteVerifier.get_collateral(pccs_url, rawQuote); + const verifier = new QuoteVerifier(); + const result = verifier.verify(rawQuote, quoteCollateral, now); + const policy = new SimplePolicy(now); + const report = result.validate(policy); + console.log("Verification Result:", report); } catch (error) { console.error("Verification failed:", error); } diff --git a/tests/js/verify_quote_rego_node.js b/tests/js/verify_quote_rego_node.js new file mode 100644 index 0000000..c1127c7 --- /dev/null +++ b/tests/js/verify_quote_rego_node.js @@ -0,0 +1,46 @@ +const fs = require("fs"); +const path = require("path"); +const { + QuoteVerifier, + RegoPolicy, + RegoPolicySet, +} = require("../../pkg/node/dcap-qvl-node"); + +function readFileAsUint8Array(filePath) { + const data = fs.readFileSync(filePath); + return new Uint8Array(data); +} + +const rawQuotePath = path.join(__dirname, "../../sample", "sgx_quote"); +const rawQuote = readFileAsUint8Array(rawQuotePath); +const now = BigInt(Math.floor(Date.now() / 1000)); + +const platformPolicyJson = JSON.stringify({ + environment: { + class_id: "3123ec35-8d38-4ea5-87a5-d6c48b567570", + }, 
+ reference: { + accepted_tcb_status: ["UpToDate"], + collateral_grace_period: 0, + }, +}); + +(async () => { + try { + const pccsUrl = "https://pccs.phala.network/sgx/certification/v4"; + const collateral = await QuoteVerifier.get_collateral(pccsUrl, rawQuote); + const verifier = new QuoteVerifier(); + + const regoReport = verifier + .verify(rawQuote, collateral, now) + .validate_rego(new RegoPolicy(platformPolicyJson)); + console.log("RegoPolicy report:", regoReport); + + const regoSetReport = verifier + .verify(rawQuote, collateral, now) + .validate_rego_set(new RegoPolicySet([platformPolicyJson])); + console.log("RegoPolicySet report:", regoSetReport); + } catch (error) { + console.error("Rego verification failed:", error); + } +})(); diff --git a/tests/js/verify_quote_web.js b/tests/js/verify_quote_web.js index a00be0b..a5c1a75 100644 --- a/tests/js/verify_quote_web.js +++ b/tests/js/verify_quote_web.js @@ -1,4 +1,4 @@ -import init, { js_verify, js_get_collateral } from "/pkg/web/dcap-qvl-web.js"; +import init, { QuoteVerifier, SimplePolicy } from "/pkg/web/dcap-qvl-web.js"; // Function to fetch a file as a Uint8Array async function fetchFileAsUint8Array(url) { @@ -26,14 +26,17 @@ async function loadFilesAndVerify() { // Get the quote collateral let pccs_url = "https://pccs.phala.network/tdx/certification/v4"; - const quoteCollateral = await js_get_collateral(pccs_url, rawQuote); + const quoteCollateral = await QuoteVerifier.get_collateral(pccs_url, rawQuote); // Current timestamp const now = BigInt(Math.floor(Date.now() / 1000)); - // Call the js_verify function - const result = js_verify(rawQuote, quoteCollateral, now); - console.log("Verification Result:", result); + // Verify + const verifier = new QuoteVerifier(); + const result = verifier.verify(rawQuote, quoteCollateral, now); + const policy = new SimplePolicy(now); + const report = result.validate(policy); + console.log("Verification Result:", report); } catch (error) { console.error("Verification 
failed:", error); } diff --git a/tests/js/verify_quote_web_test.js b/tests/js/verify_quote_web_test.js index dbf2fff..39eed4b 100644 --- a/tests/js/verify_quote_web_test.js +++ b/tests/js/verify_quote_web_test.js @@ -1,4 +1,4 @@ -import init, { js_verify, js_verify_with_root_ca, js_get_collateral } from "/pkg/web/dcap-qvl-web.js"; +import init, { QuoteVerifier, RegoPolicy, RegoPolicySet } from "/pkg/web/dcap-qvl-web.js"; const testOutputs = []; let passed = 0; @@ -90,8 +90,9 @@ async function runTests() { const rootCA = await fetchFile('/test_data/certs/root_ca.der'); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); - if (!result || !result.status) { + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.into_report_unchecked(); + if (!report || !report.status) { throw new Error('Verification should succeed but got no result'); } }); @@ -103,8 +104,9 @@ async function runTests() { const rootCA = await fetchFile('/test_data/certs/root_ca.der'); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); - if (!result || !result.status) { + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.into_report_unchecked(); + if (!report || !report.status) { throw new Error('Verification should succeed but got no result'); } }); @@ -116,8 +118,9 @@ async function runTests() { const rootCA = await fetchFile('/test_data/certs/root_ca.der'); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); - if (!result || !result.status) { + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.into_report_unchecked(); + if (!report || !report.status) { throw new Error('Verification should succeed but got no result'); } }); @@ -129,12 +132,59 @@ async 
function runTests() { const rootCA = await fetchFile('/test_data/certs/root_ca.der'); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); - if (!result || !result.status) { + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.into_report_unchecked(); + if (!report || !report.status) { throw new Error('Verification should succeed but got no result'); } }); + await runTest('RegoPolicy validates valid SGX v3 quote', async () => { + const quote = await fetchFile('/test_data/samples/valid_sgx_v3/quote.bin'); + const collateral = await fetchJSON('/test_data/samples/valid_sgx_v3/collateral.json'); + const rootCA = await fetchFile('/test_data/certs/root_ca.der'); + const now = BigInt(Math.floor(Date.now() / 1000)); + + const policyJson = JSON.stringify({ + environment: { + class_id: '3123ec35-8d38-4ea5-87a5-d6c48b567570', + }, + reference: { + accepted_tcb_status: ['UpToDate'], + collateral_grace_period: 0, + }, + }); + + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.validate_rego(new RegoPolicy(policyJson)); + if (!report || !report.status) { + throw new Error('RegoPolicy validation should succeed but got no report'); + } + }); + + await runTest('RegoPolicySet validates valid SGX v3 quote', async () => { + const quote = await fetchFile('/test_data/samples/valid_sgx_v3/quote.bin'); + const collateral = await fetchJSON('/test_data/samples/valid_sgx_v3/collateral.json'); + const rootCA = await fetchFile('/test_data/certs/root_ca.der'); + const now = BigInt(Math.floor(Date.now() / 1000)); + + const platformPolicyJson = JSON.stringify({ + environment: { + class_id: '3123ec35-8d38-4ea5-87a5-d6c48b567570', + }, + reference: { + accepted_tcb_status: ['UpToDate'], + collateral_grace_period: 0, + }, + }); + + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = 
result.validate_rego_set(new RegoPolicySet([platformPolicyJson])); + if (!report || !report.status) { + throw new Error('RegoPolicySet validation should succeed but got no report'); + } + }); + log(''); log('━━━ Decode Errors ━━━'); @@ -146,7 +196,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { // WASM errors might be strings or objects @@ -165,7 +215,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { // WASM errors might be strings or objects @@ -184,7 +234,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? 
error : (error.message || String(error)); @@ -205,7 +255,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { // WASM errors might be strings or objects @@ -224,7 +274,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -245,7 +295,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -263,7 +313,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? 
error : (error.message || String(error)); @@ -284,7 +334,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -305,7 +355,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -323,7 +373,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -344,7 +394,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? 
error : (error.message || String(error)); @@ -365,7 +415,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -386,7 +436,7 @@ async function runTests() { const now = BigInt(Math.floor(Date.now() / 1000)); try { - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); throw new Error('Should have failed but succeeded'); } catch (error) { const errorStr = typeof error === 'string' ? error : (error.message || String(error)); @@ -407,8 +457,9 @@ async function runTests() { const rootCA = await fetchFile("/test_data/certs/root_ca.der"); const now = BigInt(Math.floor(Date.now() / 1000)); - const result = js_verify_with_root_ca(quote, collateral, rootCA, now); - if (!result || !result.status) { + const result = new QuoteVerifier(rootCA).verify(quote, collateral, now); + const report = result.into_report_unchecked(); + if (!report || !report.status) { throw new Error( "Verification should succeed for PKS enabled quote" ); @@ -422,14 +473,9 @@ async function runTests() { await runTest('Fetch collateral from PCCS', async () => { const quote = await fetchFile('/sample/tdx_quote'); - // Check if get_collateral function is available in Web WASM - if (typeof js_get_collateral !== 'function') { - throw new Error('js_get_collateral function not available in Web WASM'); - } - // Test with HTTP URL (our mock server runs on HTTP) const mockPccsUrl = 'http://localhost:8765/tdx/certification/v4'; - const result = js_get_collateral(mockPccsUrl, quote); + const result = QuoteVerifier.get_collateral(mockPccsUrl, quote); // The function should return a promise 
in Web WASM just like in Node.js if (!result || typeof result.then !== 'function') { diff --git a/tests/near/contracts/gas-test/Cargo.lock b/tests/near/contracts/gas-test/Cargo.lock index 74d1e4c..d47fa2e 100644 --- a/tests/near/contracts/gas-test/Cargo.lock +++ b/tests/near/contracts/gas-test/Cargo.lock @@ -592,7 +592,7 @@ dependencies = [ [[package]] name = "dcap-qvl" -version = "0.3.10" +version = "0.3.12" dependencies = [ "anyhow", "asn1_der", diff --git a/tests/near/contracts/gas-test/src/lib.rs b/tests/near/contracts/gas-test/src/lib.rs index c0eb7af..22a3c4a 100644 --- a/tests/near/contracts/gas-test/src/lib.rs +++ b/tests/near/contracts/gas-test/src/lib.rs @@ -1,6 +1,6 @@ extern crate alloc; -use dcap_qvl::{verify::verify, QuoteCollateralV3}; +use dcap_qvl::{verify::{QuoteVerifier, ring}, QuoteCollateralV3}; use hex::decode; use near_sdk::{env, log, near}; @@ -57,9 +57,10 @@ impl Contract { // Get current timestamp in seconds let timestamp_s = get_block_timestamp_secs(); - // Call dcap-qvl::verify::verify() directly - match verify("e_bytes, &collateral_data, timestamp_s) { - Ok(_verified_report) => { + // Call dcap-qvl verify + let verifier = QuoteVerifier::new_prod(ring::backend()); + match verifier.verify("e_bytes, collateral_data, timestamp_s) { + Ok(_supplemental) => { log!("Verification result: Success"); true } diff --git a/tests/test_case.js b/tests/test_case.js index 7bad017..4adb62d 100755 --- a/tests/test_case.js +++ b/tests/test_case.js @@ -87,15 +87,13 @@ async function cmdVerify(args) { } try { - let result; - if (rootCaDer) { - result = wasmModule.js_verify_with_root_ca(quoteBytes, collateral, rootCaDer, now); - } else { - result = wasmModule.js_verify(quoteBytes, collateral, now); - } - + const verifier = rootCaDer + ? 
new wasmModule.QuoteVerifier(rootCaDer) + : new wasmModule.QuoteVerifier(); + const result = verifier.verify(quoteBytes, collateral, now); + const report = result.into_report_unchecked(); console.log("Verification successful"); - console.log(`Status: ${result.status}`); + console.log(`Status: ${report.status}`); process.exit(0); } catch (e) { console.error(`Verification failed: ${e}`); @@ -136,7 +134,7 @@ async function cmdGetCollateral(args) { } try { - const result = await wasmModule.js_get_collateral(pccsUrl, quoteBytes); + const result = await wasmModule.QuoteVerifier.get_collateral(pccsUrl, quoteBytes); if (!result || !result.tcb_info_issuer_chain) { console.error("Error: Collateral missing required fields"); diff --git a/tests/verify_quote.rs b/tests/verify_quote.rs index 098a411..896cb77 100644 --- a/tests/verify_quote.rs +++ b/tests/verify_quote.rs @@ -1,4 +1,9 @@ -#![allow(clippy::unwrap_used, clippy::expect_used)] +#![allow( + clippy::unwrap_used, + clippy::expect_used, + clippy::indexing_slicing, + clippy::arithmetic_side_effects +)] use dcap_qvl::{quote::Quote, verify::VerifiedReport, QuoteCollateralV3}; use der::Decode as DerDecode; @@ -11,14 +16,25 @@ pub fn verify( collateral: &QuoteCollateralV3, now_secs: u64, ) -> anyhow::Result { - use dcap_qvl::verify::{ring, rustcrypto}; - let ring_result = ring::verify(raw_quote, collateral, now_secs); - let rustcrypto_result = rustcrypto::verify(raw_quote, collateral, now_secs); + use dcap_qvl::verify::{ring, rustcrypto, QuoteVerifier}; + + let ring_verifier = QuoteVerifier::new_prod(ring::backend()); + let rustcrypto_verifier = QuoteVerifier::new_prod(rustcrypto::backend()); + + let ring_result = ring_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|s| s.into_report_unchecked()); + let rustcrypto_result = rustcrypto_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|s| s.into_report_unchecked()); + assert_eq!( ring_result.map_err(|e| e.to_string()), 
rustcrypto_result.map_err(|e| e.to_string()) ); - ring::verify(raw_quote, collateral, now_secs) + ring_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|s| s.into_report_unchecked()) } fn now_from_collateral(collateral: &QuoteCollateralV3) -> u64 { @@ -88,6 +104,192 @@ fn could_parse_sgx_quote() { ); } +/// Cross-validate all SupplementalData fields against independently computed values. +#[test] +fn sgx_supplemental_data_cross_validation() { + use dcap_qvl::verify::{ring, QuoteVerifier}; + use dcap_qvl::PckCertFlag; + + let raw_quote = include_bytes!("../sample/sgx_quote").to_vec(); + let collateral: QuoteCollateralV3 = + serde_json::from_slice(include_bytes!("../sample/sgx_quote_collateral.json")).unwrap(); + let now = now_from_collateral(&collateral); + + let verifier = QuoteVerifier::new_prod(ring::backend()); + let result = verifier + .verify(&raw_quote, collateral.clone(), now) + .unwrap(); + let s = &result.supplemental().unwrap(); + + // Parse quote for later use + let parsed_quote = Quote::decode(&mut &raw_quote[..]).unwrap(); + + // ── TCB status ────────────────────────────────────────────────────── + assert_eq!( + s.tcb.status.to_string(), + "ConfigurationAndSWHardeningNeeded" + ); + assert_eq!(s.tcb.advisory_ids, ["INTEL-SA-00289", "INTEL-SA-00615"]); + + // earliest_expiration is computed lazily in supplemental() + assert!(s.earliest_expiration_date > 0); + + // ── tcb_date_tag ──────────────────────────────────────────────────── + let expected_tcb_date = chrono::DateTime::parse_from_rfc3339(&s.platform.tcb_level.tcb_date) + .unwrap() + .timestamp() as u64; + assert_eq!(s.platform.tcb_date_tag, expected_tcb_date); + + // ── CRL numbers ───────────────────────────────────────────────────── + fn extract_crl_num(crl_der: &[u8]) -> u32 { + let crl = CertificateList::from_der(crl_der).unwrap(); + if let Some(exts) = &crl.tbs_cert_list.crl_extensions { + for ext in exts.iter() { + if ext.extn_id.to_string() == "2.5.29.20" { + let num = + 
::from_der(ext.extn_value.as_bytes()) + .unwrap(); + let bytes = num.as_bytes(); + let mut val: u32 = 0; + for &b in bytes { + val = (val << 8) | u32::from(b); + } + return val; + } + } + } + 0 + } + assert_eq!( + s.platform.root_ca_crl_num, + extract_crl_num(&collateral.root_ca_crl) + ); + assert_eq!(s.platform.pck_crl_num, extract_crl_num(&collateral.pck_crl)); + + // ── tcb_eval_data_number ──────────────────────────────────────────── + let tcb_info_parsed: dcap_qvl::TcbInfo = serde_json::from_str(&collateral.tcb_info).unwrap(); + let qe_id_parsed: dcap_qvl::QeIdentity = serde_json::from_str(&collateral.qe_identity).unwrap(); + let expected_eval_num = tcb_info_parsed + .tcb_evaluation_data_number + .min(qe_id_parsed.tcb_evaluation_data_number); + assert_eq!(s.tcb.eval_data_number, expected_eval_num); + + // ── root_key_id ───────────────────────────────────────────────────── + let root_ca_der = include_bytes!("../src/TrustedRootCA.der"); + let root_cert: x509_cert::Certificate = DerDecode::from_der(root_ca_der).unwrap(); + let raw_pub_key = root_cert + .tbs_certificate + .subject_public_key_info + .subject_public_key + .raw_bytes(); + let expected_root_key_id: [u8; 48] = { + use sha2::Digest; + sha2::Sha384::digest(raw_pub_key).into() + }; + assert_eq!(s.platform.root_key_id, expected_root_key_id); + + // ── PCK certificate fields ────────────────────────────────────────── + let pck_chain_der = dcap_qvl::intel::extract_cert_chain(&parsed_quote).unwrap(); + let pck_ext = dcap_qvl::intel::parse_pck_extension(&pck_chain_der[0]).unwrap(); + + assert_eq!(s.platform.pck.cpu_svn, pck_ext.cpu_svn); + assert_eq!(s.platform.pck.pce_svn, pck_ext.pce_svn); + assert_eq!(s.platform.pck.fmspc, pck_ext.fmspc); + assert_eq!(s.platform.pck.ppid, pck_ext.ppid); + assert_eq!(s.platform.pck.sgx_type, pck_ext.sgx_type as u8); + + let expected_pce_id = pck_ext.pce_id.clone(); + assert_eq!(s.platform.pck.pce_id, expected_pce_id); + + // ── TEE type 
──────────────────────────────────────────────────────── + assert_eq!(s.tee_type, 0x00000000); // SGX + + // ── Platform instance (Processor CA → should be Undefined) ────────── + assert_eq!(s.platform.pck.dynamic_platform, PckCertFlag::Undefined); + assert_eq!(s.platform.pck.cached_keys, PckCertFlag::Undefined); + assert_eq!(s.platform.pck.smt_enabled, PckCertFlag::Undefined); + + // ── TCB levels ────────────────────────────────────────────────────── + assert!(!s.platform.tcb_level.tcb_date.is_empty()); + assert!(!s.qe.tcb_level.tcb_date.is_empty()); + + // Verify ring and rustcrypto produce identical supplemental data + let rustcrypto_verifier = QuoteVerifier::new_prod(dcap_qvl::verify::rustcrypto::backend()); + let rc_result = rustcrypto_verifier + .verify(&raw_quote, collateral.clone(), now) + .unwrap(); + let rc = &rc_result.supplemental().unwrap(); + assert_eq!(s.tcb.status, rc.tcb.status); + assert_eq!(s.tcb.advisory_ids, rc.tcb.advisory_ids); + assert_eq!(s.earliest_expiration_date, rc.earliest_expiration_date); + assert_eq!(s.platform.tcb_date_tag, rc.platform.tcb_date_tag); + assert_eq!(s.platform.pck_crl_num, rc.platform.pck_crl_num); + assert_eq!(s.platform.root_ca_crl_num, rc.platform.root_ca_crl_num); + assert_eq!(s.tcb.eval_data_number, rc.tcb.eval_data_number); + assert_eq!(s.platform.root_key_id, rc.platform.root_key_id); + assert_eq!(s.platform.pck.ppid, rc.platform.pck.ppid); + assert_eq!(s.platform.pck.cpu_svn, rc.platform.pck.cpu_svn); + assert_eq!(s.platform.pck.pce_svn, rc.platform.pck.pce_svn); + assert_eq!(s.platform.pck.pce_id, rc.platform.pck.pce_id); + assert_eq!(s.platform.pck.fmspc, rc.platform.pck.fmspc); + assert_eq!(s.tee_type, rc.tee_type); + assert_eq!(s.platform.pck.sgx_type, rc.platform.pck.sgx_type); + assert_eq!( + s.platform.pck.platform_instance_id, + rc.platform.pck.platform_instance_id + ); + assert_eq!( + s.platform.pck.dynamic_platform, + rc.platform.pck.dynamic_platform + ); + assert_eq!(s.platform.pck.cached_keys, 
rc.platform.pck.cached_keys); + assert_eq!(s.platform.pck.smt_enabled, rc.platform.pck.smt_enabled); +} + +#[test] +fn supplemental_uses_quote_embedded_pck_chain_when_collateral_omits_it() { + use dcap_qvl::verify::{ring, QuoteVerifier}; + + let raw_quote = include_bytes!("../sample/sgx_quote").to_vec(); + let collateral_without_chain: QuoteCollateralV3 = + serde_json::from_slice(include_bytes!("../sample/sgx_quote_collateral.json")).unwrap(); + assert!(collateral_without_chain.pck_certificate_chain.is_none()); + let now = now_from_collateral(&collateral_without_chain); + + let parsed_quote = Quote::decode(&mut &raw_quote[..]).unwrap(); + let embedded_pem = String::from_utf8_lossy(parsed_quote.raw_cert_chain().unwrap()) + .trim_end_matches('\0') + .to_string(); + + let mut collateral_with_chain = collateral_without_chain.clone(); + collateral_with_chain.pck_certificate_chain = Some(embedded_pem); + + let verifier = QuoteVerifier::new_prod(ring::backend()); + let without_chain = verifier + .verify(&raw_quote, collateral_without_chain, now) + .unwrap() + .supplemental() + .unwrap(); + let with_chain = verifier + .verify(&raw_quote, collateral_with_chain, now) + .unwrap() + .supplemental() + .unwrap(); + + assert_eq!( + without_chain.earliest_issue_date, + with_chain.earliest_issue_date + ); + assert_eq!( + without_chain.latest_issue_date, + with_chain.latest_issue_date + ); + assert_eq!( + without_chain.earliest_expiration_date, + with_chain.earliest_expiration_date + ); +} + #[test] fn could_parse_tdx_quote() { let raw_quote = include_bytes!("../sample/tdx_quote"); @@ -101,3 +303,213 @@ fn could_parse_tdx_quote() { assert_eq!(tcb_status.status, "UpToDate"); assert!(tcb_status.advisory_ids.is_empty()); } + +/// Print key SupplementalData fields for both SGX and TDX quotes. 
+#[test] +fn print_supplemental_data_comparison() { + use dcap_qvl::verify::{ring, QuoteVerifier}; + + fn ts_to_utc(ts: u64) -> String { + chrono::DateTime::from_timestamp(ts as i64, 0) + .map(|dt| dt.format("%Y-%m-%dT%H:%M:%SZ").to_string()) + .unwrap_or_else(|| format!("{ts}")) + } + + let verifier = QuoteVerifier::new_prod(ring::backend()); + + // ═══════════════════════════════════════════════════════════════════ + // SGX Quote + // ═══════════════════════════════════════════════════════════════════ + println!("\n{:=<80}", ""); + println!("SGX Quote — SupplementalData"); + println!("{:=<80}", ""); + + let raw_quote = include_bytes!("../sample/sgx_quote").to_vec(); + let collateral: QuoteCollateralV3 = + serde_json::from_slice(include_bytes!("../sample/sgx_quote_collateral.json")).unwrap(); + let now = now_from_collateral(&collateral); + + let result = verifier + .verify(&raw_quote, collateral.clone(), now) + .unwrap(); + let s = &result.supplemental().unwrap(); + + println!("{:<40} {:?}", "tcb.status", s.tcb.status); + println!("{:<40} {:?}", "tcb.advisory_ids", s.tcb.advisory_ids); + println!( + "{:<40} {} ({})", + "tcb.earliest_expiration", + s.earliest_expiration_date, + ts_to_utc(s.earliest_expiration_date) + ); + println!("{:<40} {}", "tcb.eval_data_number", s.tcb.eval_data_number); + println!( + "{:<40} {} ({})", + "platform.tcb_date_tag", + s.platform.tcb_date_tag, + ts_to_utc(s.platform.tcb_date_tag) + ); + println!("{:<40} {}", "platform.pck_crl_num", s.platform.pck_crl_num); + println!( + "{:<40} {}", + "platform.root_ca_crl_num", s.platform.root_ca_crl_num + ); + println!( + "{:<40} {}...", + "platform.root_key_id", + hex::encode(&s.platform.root_key_id[..24]) + ); + println!( + "{:<40} {}", + "platform.pck.fmspc", + hex::encode(s.platform.pck.fmspc) + ); + println!( + "{:<40} {}", + "platform.pck.sgx_type", s.platform.pck.sgx_type + ); + println!( + "{:<40} {:?}", + "platform.pck.dynamic_platform", s.platform.pck.dynamic_platform + ); + println!( + 
"{:<40} {:?}", + "platform.pck.cached_keys", s.platform.pck.cached_keys + ); + println!( + "{:<40} {:?}", + "platform.pck.smt_enabled", s.platform.pck.smt_enabled + ); + println!("{:<40} 0x{:08X}", "tee_type", s.tee_type); + println!( + "{:<40} {:?}", + "platform.tcb_level.tcb_status", s.platform.tcb_level.tcb_status + ); + println!( + "{:<40} {:?}", + "qe.tcb_level.tcb_status", s.qe.tcb_level.tcb_status + ); + + // ═══════════════════════════════════════════════════════════════════ + // TDX Quote + // ═══════════════════════════════════════════════════════════════════ + println!("\n{:=<80}", ""); + println!("TDX Quote — SupplementalData"); + println!("{:=<80}", ""); + + let raw_quote_tdx = include_bytes!("../sample/tdx_quote"); + let collateral_tdx: QuoteCollateralV3 = + serde_json::from_slice(include_bytes!("../sample/tdx_quote_collateral.json")).unwrap(); + let now_tdx = now_from_collateral(&collateral_tdx); + + let result_tdx = verifier + .verify(raw_quote_tdx, collateral_tdx.clone(), now_tdx) + .unwrap(); + let t = &result_tdx.supplemental().unwrap(); + + println!("{:<40} {:?}", "tcb.status", t.tcb.status); + println!("{:<40} {:?}", "tcb.advisory_ids", t.tcb.advisory_ids); + println!( + "{:<40} {} ({})", + "tcb.earliest_expiration", + t.earliest_expiration_date, + ts_to_utc(t.earliest_expiration_date) + ); + println!("{:<40} {}", "tcb.eval_data_number", t.tcb.eval_data_number); + println!( + "{:<40} {} ({})", + "platform.tcb_date_tag", + t.platform.tcb_date_tag, + ts_to_utc(t.platform.tcb_date_tag) + ); + println!("{:<40} {}", "platform.pck_crl_num", t.platform.pck_crl_num); + println!( + "{:<40} {}", + "platform.root_ca_crl_num", t.platform.root_ca_crl_num + ); + println!( + "{:<40} {}...", + "platform.root_key_id", + hex::encode(&t.platform.root_key_id[..24]) + ); + println!( + "{:<40} {}", + "platform.pck.fmspc", + hex::encode(t.platform.pck.fmspc) + ); + println!( + "{:<40} {}", + "platform.pck.sgx_type", t.platform.pck.sgx_type + ); + println!( + "{:<40} 
{:?}", + "platform.pck.dynamic_platform", t.platform.pck.dynamic_platform + ); + println!( + "{:<40} {:?}", + "platform.pck.cached_keys", t.platform.pck.cached_keys + ); + println!( + "{:<40} {:?}", + "platform.pck.smt_enabled", t.platform.pck.smt_enabled + ); + println!("{:<40} 0x{:08X}", "tee_type", t.tee_type); + println!( + "{:<40} {:?}", + "platform.tcb_level.tcb_status", t.platform.tcb_level.tcb_status + ); + println!( + "{:<40} {:?}", + "qe.tcb_level.tcb_status", t.qe.tcb_level.tcb_status + ); +} + +/// Cross-validate TDX supplemental data fields. +#[test] +fn tdx_supplemental_data_cross_validation() { + use dcap_qvl::verify::{ring, QuoteVerifier}; + + let raw_quote = include_bytes!("../sample/tdx_quote"); + let collateral: QuoteCollateralV3 = + serde_json::from_slice(include_bytes!("../sample/tdx_quote_collateral.json")).unwrap(); + let now = now_from_collateral(&collateral); + + let verifier = QuoteVerifier::new_prod(ring::backend()); + let result = verifier.verify(raw_quote, collateral.clone(), now).unwrap(); + let s = &result.supplemental().unwrap(); + + // TDX quote should have tee_type = 0x81 + assert_eq!(s.tee_type, 0x00000081); + assert_eq!(s.tcb.status.to_string(), "UpToDate"); + assert!(s.tcb.advisory_ids.is_empty()); + + // Fields should be populated (computed lazily in supplemental()) + assert!(s.earliest_expiration_date > 0); + assert!(s.platform.tcb_date_tag > 0); + + // root_key_id should match SHA-384 of Intel root CA raw public key bytes + let root_ca_der = include_bytes!("../src/TrustedRootCA.der"); + let root_cert: x509_cert::Certificate = DerDecode::from_der(root_ca_der).unwrap(); + let raw_pub_key = root_cert + .tbs_certificate + .subject_public_key_info + .subject_public_key + .raw_bytes(); + let expected_root_key_id: [u8; 48] = { + use sha2::Digest; + sha2::Sha384::digest(raw_pub_key).into() + }; + assert_eq!(s.platform.root_key_id, expected_root_key_id); + + // Verify ring == rustcrypto for all fields + let rc_verifier = 
QuoteVerifier::new_prod(dcap_qvl::verify::rustcrypto::backend()); + let rc_result = rc_verifier + .verify(raw_quote, collateral.clone(), now) + .unwrap(); + let rc = &rc_result.supplemental().unwrap(); + assert_eq!(s.tee_type, rc.tee_type); + assert_eq!(s.tcb.status, rc.tcb.status); + assert_eq!(s.platform.root_key_id, rc.platform.root_key_id); + assert_eq!(s.earliest_expiration_date, rc.earliest_expiration_date); + assert_eq!(s.tcb.eval_data_number, rc.tcb.eval_data_number); +} diff --git a/tests/verify_tcb_override.rs b/tests/verify_tcb_override.rs index b4f59b6..7044377 100644 --- a/tests/verify_tcb_override.rs +++ b/tests/verify_tcb_override.rs @@ -1,4 +1,9 @@ -#![allow(clippy::unwrap_used, clippy::expect_used)] +#![allow( + clippy::unwrap_used, + clippy::expect_used, + clippy::indexing_slicing, + clippy::arithmetic_side_effects +)] #[cfg(feature = "danger-allow-tcb-override")] mod tests { @@ -17,14 +22,23 @@ mod tests { collateral: &QuoteCollateralV3, now_secs: u64, ) -> anyhow::Result { - use dcap_qvl::verify::{ring, rustcrypto}; - let ring_result = ring::verify(raw_quote, collateral, now_secs); - let rustcrypto_result = rustcrypto::verify(raw_quote, collateral, now_secs); + use dcap_qvl::verify::{ring, rustcrypto, QuoteVerifier}; + let ring_verifier = QuoteVerifier::new_prod(ring::backend()); + let rustcrypto_verifier = QuoteVerifier::new_prod(rustcrypto::backend()); + + let ring_result = ring_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|result| result.into_report_unchecked()); + let rustcrypto_result = rustcrypto_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|result| result.into_report_unchecked()); assert_eq!( ring_result.map_err(|e| e.to_string()), rustcrypto_result.map_err(|e| e.to_string()) ); - ring::verify(raw_quote, collateral, now_secs) + ring_verifier + .verify(raw_quote, collateral.clone(), now_secs) + .map(|result| result.into_report_unchecked()) } fn dangerous_verify_with_tcb_override( @@ -36,24 +50,38 
@@ mod tests { where F: FnOnce(TcbInfo) -> TcbInfo + Copy, { - use dcap_qvl::verify::{ring, rustcrypto}; - let ring_result = ring::dangerous_verify_with_tcb_override( - raw_quote, - collateral, - now_secs, - override_tcb_info, - ); - let rustcrypto_result = rustcrypto::dangerous_verify_with_tcb_override( - raw_quote, - collateral, - now_secs, - override_tcb_info, - ); + use dcap_qvl::verify::{ring, rustcrypto, QuoteVerifier}; + let ring_verifier = QuoteVerifier::new_prod(ring::backend()); + let rustcrypto_verifier = QuoteVerifier::new_prod(rustcrypto::backend()); + + let ring_result = ring_verifier + .dangerous_verify_with_tcb_override( + raw_quote, + collateral.clone(), + now_secs, + override_tcb_info, + ) + .map(|result| result.into_report_unchecked()); + let rustcrypto_result = rustcrypto_verifier + .dangerous_verify_with_tcb_override( + raw_quote, + collateral.clone(), + now_secs, + override_tcb_info, + ) + .map(|result| result.into_report_unchecked()); assert_eq!( ring_result.map_err(|e| e.to_string()), rustcrypto_result.map_err(|e| e.to_string()) ); - ring::dangerous_verify_with_tcb_override(raw_quote, collateral, now_secs, override_tcb_info) + ring_verifier + .dangerous_verify_with_tcb_override( + raw_quote, + collateral.clone(), + now_secs, + override_tcb_info, + ) + .map(|result| result.into_report_unchecked()) } fn force_out_of_date(mut tcb_info: TcbInfo) -> TcbInfo { @@ -135,7 +163,7 @@ mod tests { #[test] fn override_can_change_tcb_result_and_runs_once() { - use dcap_qvl::verify::ring; + use dcap_qvl::verify::{ring, QuoteVerifier}; let raw_quote = include_bytes!("../sample/tdx_quote"); let raw_quote_collateral = include_bytes!("../sample/tdx_quote_collateral.json"); @@ -148,19 +176,22 @@ mod tests { static OVERRIDE_CALLS: AtomicUsize = AtomicUsize::new(0); OVERRIDE_CALLS.store(0, Ordering::SeqCst); - let ring_overridden = ring::dangerous_verify_with_tcb_override( - raw_quote, - "e_collateral, - now, - |mut tcb_info| { - OVERRIDE_CALLS.fetch_add(1, 
Ordering::SeqCst); - for level in &mut tcb_info.tcb_levels { - level.tcb_status = TcbStatus::OutOfDate; - } - tcb_info - }, - ) - .expect("verify with override"); + let ring_verifier = QuoteVerifier::new_prod(ring::backend()); + let ring_overridden = ring_verifier + .dangerous_verify_with_tcb_override( + raw_quote, + quote_collateral.clone(), + now, + |mut tcb_info| { + OVERRIDE_CALLS.fetch_add(1, Ordering::SeqCst); + for level in &mut tcb_info.tcb_levels { + level.tcb_status = TcbStatus::OutOfDate; + } + tcb_info + }, + ) + .map(|result| result.into_report_unchecked()) + .expect("verify with override"); assert_eq!(OVERRIDE_CALLS.load(Ordering::SeqCst), 1); assert_eq!(ring_overridden.status, "OutOfDate"); diff --git a/tests/vite/src/main.ts b/tests/vite/src/main.ts index a1322dd..92b73dc 100644 --- a/tests/vite/src/main.ts +++ b/tests/vite/src/main.ts @@ -1,6 +1,6 @@ import "./style.css"; -import init, { js_verify, js_get_collateral } from "@phala/dcap-qvl-web"; +import init, { QuoteVerifier, SimplePolicy } from "@phala/dcap-qvl-web"; import wasm from "@phala/dcap-qvl-web/dcap-qvl-web_bg.wasm"; const PCCS_URL = "https://pccs.phala.network/tdx/certification/v4"; @@ -16,16 +16,14 @@ async function fetchQuoteAsUint8Array(url: string): Promise { init(wasm).then(() => { console.log("Phala DCAP QVL initialized!"); - // You can now use js_verify, js_get_collateral, etc. 
fetchQuoteAsUint8Array("/sample/tdx_quote").then(async (rawQuote) => { - const quoteCollateral = await js_get_collateral(PCCS_URL, rawQuote); - - // Current timestamp + const quoteCollateral = await QuoteVerifier.get_collateral(PCCS_URL, rawQuote); const now = BigInt(Math.floor(Date.now() / 1000)); - - // Call the js_verify function - const result = js_verify(rawQuote, quoteCollateral, now); - console.log("Verification Result:", result); + const verifier = new QuoteVerifier(); + const result = verifier.verify(rawQuote, quoteCollateral, now); + const policy = new SimplePolicy(now); + const report = result.validate(policy); + console.log("Verification Result:", report); }); }).catch((error: unknown) => { console.error("Error:", error);