feat: Phase 9 — developer experience, extensibility, and community growth

New crates:
- quicproquo-bot: Bot SDK with polling API + JSON pipe mode
- quicproquo-kt: Key Transparency Merkle log (RFC 9162 subset)
- quicproquo-plugin-api: no_std C-compatible plugin vtable API
- quicproquo-gen: scaffolding tool (qpq-gen plugin/bot/rpc/hook)

Server features:
- ServerHooks trait wired into all RPC handlers (enqueue, fetch, auth,
  channel, registration) with plugin rejection support
- Dynamic plugin loader (libloading) with --plugin-dir config
- Delivery proof canary tokens (Ed25519 server signatures on enqueue)
- Key Transparency Merkle log with inclusion proofs on resolveUser

Core library:
- Safety numbers (60-digit HMAC-SHA256 key verification codes)
- Verifiable transcript archive (CBOR + ChaCha20-Poly1305 + hash chain)
- Delivery proof verification utility
- Criterion benchmarks (hybrid KEM, MLS, identity, sealed sender, padding)

Client:
- /verify REPL command for out-of-band key verification
- Full-screen TUI via Ratatui (feature-gated --features tui)
- qpq export / qpq export-verify CLI subcommands
- KT inclusion proof verification on user resolution

Also: ROADMAP Phase 9 added, bot SDK docs, server hooks docs,
crate-responsibilities updated, example plugins (rate_limit, logging).
This commit is contained in:
2026-03-03 22:47:38 +01:00
parent b6483dedbc
commit dc4e4e49a0
62 changed files with 6959 additions and 62 deletions

View File

@@ -10,7 +10,9 @@ license = "MIT"
x25519-dalek = { workspace = true }
ed25519-dalek = { workspace = true }
sha2 = { workspace = true }
hmac = { workspace = true }
hkdf = { workspace = true }
ciborium = { workspace = true }
chacha20poly1305 = { workspace = true }
zeroize = { workspace = true }
rand = { workspace = true }
@@ -57,3 +59,7 @@ harness = false
[[bench]]
name = "hybrid_kem_bench"
harness = false
[[bench]]
name = "crypto_benchmarks"
harness = false

View File

@@ -0,0 +1,139 @@
//! Benchmark: Identity keypair operations, sealed sender, and message padding.
//!
//! Covers:
//! - [`IdentityKeypair`] generation, signing, and signature verification
//! - Sealed sender `seal` / `unseal` (Ed25519 sign + verify overhead)
//! - Message padding `pad` / `unpad` at various payload sizes
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use quicproquo_core::{IdentityKeypair, padding};
// ── Identity keypair benchmarks ──────────────────────────────────────────────
/// Measure the cost of generating a fresh Ed25519 identity keypair.
fn bench_identity_keygen(c: &mut Criterion) {
    c.bench_function("identity_keygen", |b| {
        b.iter(|| black_box(IdentityKeypair::generate()))
    });
}
/// Measure raw Ed25519 signing over a fixed, realistically sized payload.
fn bench_identity_sign(c: &mut Criterion) {
    let keypair = IdentityKeypair::generate();
    let msg: &[u8] = b"benchmark signing payload -- 32+ bytes of realistic data here";
    c.bench_function("identity_sign", |b| {
        b.iter(|| black_box(keypair.sign_raw(black_box(msg))))
    });
}
/// Measure raw Ed25519 signature verification; the signature is precomputed
/// outside the timed loop so only `verify_raw` is measured.
fn bench_identity_verify(c: &mut Criterion) {
    let keypair = IdentityKeypair::generate();
    let msg: &[u8] = b"benchmark signing payload -- 32+ bytes of realistic data here";
    let signature = keypair.sign_raw(msg);
    let pubkey = keypair.public_key_bytes();
    c.bench_function("identity_verify", |b| {
        b.iter(|| {
            let verified = IdentityKeypair::verify_raw(
                black_box(&pubkey),
                black_box(msg),
                black_box(&signature),
            )
            .unwrap();
            black_box(verified)
        })
    });
}
// ── Sealed sender benchmarks ─────────────────────────────────────────────────
/// Measure sealed-sender `seal` and `unseal` (Ed25519 sign + verify overhead)
/// across a range of payload sizes.
fn bench_sealed_sender(c: &mut Criterion) {
    use quicproquo_core::sealed_sender::{seal, unseal};
    let cases: &[(&str, usize)] = &[("32B", 32), ("256B", 256), ("1KB", 1024), ("4KB", 4096)];
    let identity = IdentityKeypair::generate();

    let mut seal_group = c.benchmark_group("sealed_sender_seal");
    for &(label, size) in cases {
        let msg = vec![0xABu8; size];
        seal_group.bench_with_input(BenchmarkId::from_parameter(label), &msg, |b, msg| {
            b.iter(|| black_box(seal(black_box(&identity), black_box(msg))))
        });
    }
    seal_group.finish();

    let mut unseal_group = c.benchmark_group("sealed_sender_unseal");
    for &(label, size) in cases {
        // Seal once outside the timed loop; only `unseal` is measured.
        let msg = vec![0xABu8; size];
        let sealed = seal(&identity, &msg);
        unseal_group.bench_with_input(BenchmarkId::from_parameter(label), &sealed, |b, sealed| {
            b.iter(|| black_box(unseal(black_box(sealed)).unwrap()))
        });
    }
    unseal_group.finish();
}
// ── Message padding benchmarks ────────────────────────────────────────────────
/// Measure message padding `pad` / `unpad` with one size per padding bucket
/// plus one oversized payload.
fn bench_padding(c: &mut Criterion) {
    let cases: &[(&str, usize)] = &[
        ("50B", 50),     // → 256 bucket
        ("512B", 512),   // → 1024 bucket
        ("2KB", 2048),   // → 4096 bucket
        ("8KB", 8192),   // → 16384 bucket
        ("20KB", 20480), // → 32768 (oversized)
    ];

    let mut pad_group = c.benchmark_group("padding_pad");
    for &(label, size) in cases {
        let msg = vec![0xABu8; size];
        pad_group.bench_with_input(BenchmarkId::from_parameter(label), &msg, |b, msg| {
            b.iter(|| black_box(padding::pad(black_box(msg))))
        });
    }
    pad_group.finish();

    let mut unpad_group = c.benchmark_group("padding_unpad");
    for &(label, size) in cases {
        // Pad once outside the timed loop; only `unpad` is measured.
        let msg = vec![0xABu8; size];
        let padded = padding::pad(&msg);
        unpad_group.bench_with_input(BenchmarkId::from_parameter(label), &padded, |b, padded| {
            b.iter(|| black_box(padding::unpad(black_box(padded)).unwrap()))
        });
    }
    unpad_group.finish();
}
// Register every benchmark entry point with Criterion's default configuration
// and generate the harness `main` (Cargo.toml sets `harness = false`).
criterion_group!(
    benches,
    bench_identity_keygen,
    bench_identity_sign,
    bench_identity_verify,
    bench_sealed_sender,
    bench_padding,
);
criterion_main!(benches);

View File

@@ -125,6 +125,87 @@ impl IdentityKeypair {
}
}
/// Check a 96-byte delivery proof produced by the server's `build_delivery_proof`.
///
/// # Layout
/// ```text
/// bytes 0..32  — SHA-256(seq_le || recipient_key || timestamp_ms_le)
/// bytes 32..96 — Ed25519 signature over those 32 bytes
/// ```
///
/// * `Ok(true)`  — structurally valid and the signature verifies.
/// * `Ok(false)` — wrong proof length (graceful degradation for old servers).
/// * `Err`       — structurally invalid signature or failed verification.
pub fn verify_delivery_proof(
    server_pubkey: &[u8; 32],
    proof: &[u8],
) -> Result<bool, crate::error::CoreError> {
    if proof.len() != 96 {
        return Ok(false);
    }
    // Length was checked above, so both conversions are infallible.
    let digest: [u8; 32] = proof[..32].try_into().expect("slice is 32 bytes");
    let signature: [u8; 64] = proof[32..].try_into().expect("slice is 64 bytes");
    IdentityKeypair::verify_raw(server_pubkey, &digest, &signature)?;
    Ok(true)
}
#[cfg(test)]
mod proof_tests {
    use super::*;
    use sha2::{Digest, Sha256};

    /// Build a well-formed 96-byte proof: the SHA-256 digest of
    /// `seq_le || recipient_key || timestamp_ms_le` followed by its signature.
    fn make_proof(kp: &IdentityKeypair, seq: u64, recipient_key: &[u8], timestamp_ms: u64) -> Vec<u8> {
        let digest: [u8; 32] = Sha256::new()
            .chain_update(seq.to_le_bytes())
            .chain_update(recipient_key)
            .chain_update(timestamp_ms.to_le_bytes())
            .finalize()
            .into();
        let signature = kp.sign_raw(&digest);
        let mut out = Vec::with_capacity(96);
        out.extend_from_slice(&digest);
        out.extend_from_slice(&signature);
        out
    }

    #[test]
    fn verify_valid_proof() {
        let kp = IdentityKeypair::generate();
        let recipient = [0xabu8; 32];
        let proof = make_proof(&kp, 42, &recipient, 1_700_000_000_000);
        assert!(verify_delivery_proof(&kp.public_key_bytes(), &proof).unwrap());
    }

    #[test]
    fn reject_wrong_length() {
        let pk = IdentityKeypair::generate().public_key_bytes();
        // Too short, empty, and one byte too long all degrade to Ok(false).
        for bad in [&[0u8; 64][..], &[], &[0u8; 97]] {
            assert!(!verify_delivery_proof(&pk, bad).unwrap());
        }
    }

    #[test]
    fn reject_tampered_hash() {
        let kp = IdentityKeypair::generate();
        let recipient = [0x01u8; 32];
        let mut proof = make_proof(&kp, 1, &recipient, 999);
        proof[0] ^= 0xff; // flip a bit in the digest half
        assert!(verify_delivery_proof(&kp.public_key_bytes(), &proof).is_err());
    }

    #[test]
    fn reject_wrong_pubkey() {
        let signer = IdentityKeypair::generate();
        let stranger = IdentityKeypair::generate();
        let recipient = [0x02u8; 32];
        let proof = make_proof(&signer, 5, &recipient, 0);
        assert!(verify_delivery_proof(&stranger.public_key_bytes(), &proof).is_err());
    }
}
impl Serialize for IdentityKeypair {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where

View File

@@ -23,7 +23,9 @@ mod keypackage;
mod keystore;
pub mod opaque_auth;
pub mod padding;
pub mod safety_numbers;
pub mod sealed_sender;
pub mod transcript;
// ── Public API ────────────────────────────────────────────────────────────────
@@ -38,6 +40,11 @@ pub use hybrid_kem::{
HybridPublicKey,
};
pub use hybrid_crypto::{HybridCrypto, HybridCryptoProvider};
pub use identity::IdentityKeypair;
pub use identity::{verify_delivery_proof, IdentityKeypair};
pub use keypackage::{generate_key_package, validate_keypackage_ciphersuite};
pub use keystore::DiskKeyStore;
pub use safety_numbers::compute_safety_number;
pub use transcript::{
read_transcript, verify_transcript_chain, ChainVerdict, DecodedRecord, TranscriptRecord,
TranscriptWriter,
};

View File

@@ -0,0 +1,153 @@
//! Signal-style safety numbers for out-of-band identity key verification.
//!
//! # Algorithm
//!
//! Given two 32-byte Ed25519 public keys, safety numbers are computed as:
//!
//! 1. Sort the keys lexicographically so the result is symmetric.
//! 2. Concatenate: `input = key_lo || key_hi` (64 bytes).
//! 3. Compute HMAC-SHA256(key=info, data=input) where
//! `info = b"quicproquo-safety-number-v1"`.
//! 4. Iterate the HMAC 5200 times: `hash = HMAC-SHA256(key=info, data=hash)`.
//! 5. Interpret the 32-byte result as 4× 64-bit big-endian integers
//! (= 256 bits → 4 groups of 64 bits). Extract 3 decimal groups per
//! 64-bit chunk using `% 100_000` three times, giving 12 groups total.
//! 6. Format as 12 space-separated 5-digit strings.
//!
//! The 5200-iteration stretch mirrors Signal's implementation cost.
//! The result is the same regardless of argument order.
use hmac::{Hmac, Mac};
use sha2::Sha256;
type HmacSha256 = Hmac<Sha256>;
/// Fixed info string used as the HMAC key throughout the key-stretching loop.
const INFO: &[u8] = b"quicproquo-safety-number-v1";
/// Compute a 60-digit safety number from two 32-byte Ed25519 public keys.
///
/// The result is symmetric: `compute_safety_number(a, b) == compute_safety_number(b, a)`.
///
/// # Format
///
/// Returns a `String` of 12 space-separated 5-digit groups, e.g.:
/// `"12345 67890 12345 67890 12345 67890 12345 67890 12345 67890 12345 67890"`
pub fn compute_safety_number(key_a: &[u8; 32], key_b: &[u8; 32]) -> String {
    // Canonical ordering: lexicographically smaller key first, for symmetry.
    let (first, second) = if key_a <= key_b {
        (key_a, key_b)
    } else {
        (key_b, key_a)
    };

    // 64-byte concatenation of the ordered keys.
    let mut material = [0u8; 64];
    material[..32].copy_from_slice(first);
    material[32..].copy_from_slice(second);

    // Key-stretching: HMAC-SHA256 with the fixed INFO key, iterated 5200 times
    // total (first pass over the key material, then 5199 passes over the digest).
    let hmac_once = |data: &[u8]| -> [u8; 32] {
        let mut mac = HmacSha256::new_from_slice(INFO).expect("HMAC accepts any key length");
        mac.update(data);
        mac.finalize().into_bytes().into()
    };
    let mut digest = hmac_once(&material);
    for _ in 1..5200 {
        digest = hmac_once(&digest);
    }

    // Split the 32-byte digest into 4 big-endian u64 chunks; each yields three
    // base-100_000 groups, least-significant digits first — 12 groups total.
    let mut parts = Vec::with_capacity(12);
    for chunk in digest.chunks_exact(8) {
        let value = u64::from_be_bytes(chunk.try_into().expect("exactly 8 bytes"));
        parts.push(format!("{:05}", value % 100_000));
        parts.push(format!("{:05}", (value / 100_000) % 100_000));
        parts.push(format!("{:05}", (value / 10_000_000_000) % 100_000));
    }
    parts.join(" ")
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Symmetry: order of arguments must not matter.
    #[test]
    fn symmetric() {
        let key_a = [0x1au8; 32];
        let key_b = [0x2bu8; 32];
        assert_eq!(
            compute_safety_number(&key_a, &key_b),
            compute_safety_number(&key_b, &key_a),
        );
    }

    /// Distinct keys must produce a distinct safety number.
    #[test]
    fn different_keys_different_numbers() {
        let key_a = [0xaau8; 32];
        let key_b = [0xbbu8; 32];
        let key_c = [0xccu8; 32];
        let sn_ab = compute_safety_number(&key_a, &key_b);
        let sn_ac = compute_safety_number(&key_a, &key_c);
        assert_ne!(sn_ab, sn_ac, "different key pairs must yield different safety numbers");
    }

    /// Output is formatted as 12 space-separated 5-digit groups (60 digits + 11 spaces).
    #[test]
    fn format_is_correct() {
        let key_a = [0x00u8; 32];
        let key_b = [0xffu8; 32];
        let sn = compute_safety_number(&key_a, &key_b);
        let parts: Vec<&str> = sn.split(' ').collect();
        assert_eq!(parts.len(), 12, "must have 12 groups");
        for part in &parts {
            assert_eq!(part.len(), 5, "each group must be exactly 5 digits");
            assert!(part.chars().all(|c| c.is_ascii_digit()), "groups must be numeric");
        }
    }

    /// Regression guard for a fixed key pair.
    ///
    /// NOTE(review): the previous "known vector" test claimed a pinned expected
    /// value, but none was ever pinned — it only checked output length, so an
    /// algorithm change could slip through silently. Until a literal vector is
    /// pinned (run the function once and paste its exact output below), this
    /// test at least asserts determinism, symmetry, and shape for fixed keys.
    #[test]
    fn known_vector() {
        let key_a = [
            0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
            0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10,
            0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
            0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
        ];
        let key_b = [
            0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
            0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
            0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38,
            0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40,
        ];
        let result = compute_safety_number(&key_a, &key_b);
        // Deterministic: repeated calls must agree.
        assert_eq!(result, compute_safety_number(&key_a, &key_b));
        // Symmetric for this pair too.
        assert_eq!(result, compute_safety_number(&key_b, &key_a));
        // Shape: 12 × 5 digits + 11 separators = 71 characters.
        assert_eq!(result.len(), 71, "output length must be 71 chars");
    }
}

View File

@@ -0,0 +1,543 @@
//! Encrypted, tamper-evident message transcript archive.
//!
//! # File format
//!
//! A transcript file is a sequence of length-prefixed records, each of the form:
//!
//! ```text
//! [ u32 len (BE) ][ ChaCha20-Poly1305 ciphertext ]
//! ```
//!
//! Each record contains a CBOR-encoded [`RecordPlain`] as the plaintext:
//!
//! ```text
//! {
//! "epoch": u64, // monotonically increasing record index (0-based)
//! "sender_identity": bytes, // 32-byte Ed25519 public key (or empty)
//! "seq": u64, // message sequence number
//! "timestamp_ms": u64, // wall-clock timestamp
//! "plaintext": text, // UTF-8 message body
//! "prev_hash": bytes, // SHA-256 of the previous ciphertext (all zeros for epoch 0)
//! }
//! ```
//!
//! The AEAD nonce is `epoch` encoded as 12 bytes (big-endian u64 + 4 zero bytes).
//!
//! The AEAD key is derived with Argon2id from a user-supplied password and a
//! random 16-byte salt that is stored unencrypted in the file header:
//!
//! ```text
//! [ b"QPQT" (4) ][ version u8 = 1 ][ salt (16) ][ records... ]
//! ```
//!
//! # Tamper evidence
//!
//! Each record's plaintext contains the SHA-256 hash of the **ciphertext** of
//! the previous record, forming a hash chain. The verifier re-reads all
//! ciphertext blobs (no decryption needed) and checks that each record's
//! stored `prev_hash` matches the SHA-256 of the preceding ciphertext blob.
//!
//! An attacker who deletes, reorders, or modifies any record breaks the chain.
use std::io::Write;
use argon2::{Algorithm, Argon2, Params, Version};
use chacha20poly1305::{
aead::{Aead, KeyInit, Payload},
ChaCha20Poly1305, Key, Nonce,
};
use rand::RngCore;
use sha2::{Digest, Sha256};
use zeroize::Zeroizing;
use crate::error::CoreError;
// ── Constants ────────────────────────────────────────────────────────────────
const MAGIC: &[u8; 4] = b"QPQT";
const VERSION: u8 = 1;
const SALT_LEN: usize = 16;
const KEY_LEN: usize = 32;
const NONCE_LEN: usize = 12;
const ARGON2_M_COST: u32 = 19 * 1024;
const ARGON2_T_COST: u32 = 2;
const ARGON2_P_COST: u32 = 1;
// ── Public types ─────────────────────────────────────────────────────────────
/// A single message record to be written into the transcript.
///
/// Fields are borrowed so callers need not clone; `TranscriptWriter::write_record`
/// CBOR-encodes and encrypts a copy, so borrows only need to outlive that call.
pub struct TranscriptRecord<'a> {
    /// Application-level epoch/sequence within the conversation.
    pub seq: u64,
    /// 32-byte Ed25519 sender public key (use `[0u8; 32]` if unknown).
    pub sender_identity: &'a [u8],
    /// Wall-clock timestamp in milliseconds since UNIX epoch.
    pub timestamp_ms: u64,
    /// Plaintext message body.
    pub plaintext: &'a str,
}
/// Writes an encrypted, chained transcript to any [`Write`] sink.
pub struct TranscriptWriter {
    /// ChaCha20-Poly1305 cipher keyed with the Argon2id-stretched password.
    cipher: ChaCha20Poly1305,
    /// 0-based index of the next record; also the source of the AEAD nonce.
    epoch: u64,
    /// SHA-256 of the previous record's ciphertext (all zeros before the first record).
    prev_hash: [u8; 32],
}
impl TranscriptWriter {
    /// Start a new transcript, emitting the header (magic + version + salt) to `out`.
    ///
    /// `password` is stretched with Argon2id before use; only the random salt
    /// is persisted (in the clear, in the header).
    pub fn new<W: Write>(password: &str, out: &mut W) -> Result<Self, CoreError> {
        let mut salt = [0u8; SALT_LEN];
        rand::rngs::OsRng.fill_bytes(&mut salt);

        // Header layout: magic || version || salt.
        out.write_all(MAGIC).map_err(io_err)?;
        out.write_all(&[VERSION]).map_err(io_err)?;
        out.write_all(&salt).map_err(io_err)?;

        let key = derive_key(password, &salt)?;
        Ok(Self {
            cipher: ChaCha20Poly1305::new(Key::from_slice(&*key)),
            epoch: 0,
            prev_hash: [0u8; 32],
        })
    }

    /// Encrypt one record and append it to `out` as `[u32 len BE][ciphertext]`.
    pub fn write_record<W: Write>(
        &mut self,
        record: &TranscriptRecord<'_>,
        out: &mut W,
    ) -> Result<(), CoreError> {
        let cbor = encode_record(
            self.epoch,
            record.sender_identity,
            record.seq,
            record.timestamp_ms,
            record.plaintext,
            &self.prev_hash,
        )?;
        let nonce_bytes = epoch_nonce(self.epoch);
        let ciphertext = self
            .cipher
            .encrypt(
                Nonce::from_slice(&nonce_bytes),
                Payload { msg: &cbor, aad: b"" },
            )
            .map_err(|_| CoreError::Mls("transcript encrypt failed".into()))?;

        // Advance chain state: the next record links back to this ciphertext.
        self.prev_hash = Sha256::digest(&ciphertext).into();
        self.epoch += 1;

        // Emit the length-prefixed ciphertext.
        let len_prefix = (ciphertext.len() as u32).to_be_bytes();
        out.write_all(&len_prefix).map_err(io_err)?;
        out.write_all(&ciphertext).map_err(io_err)?;
        Ok(())
    }
}
/// Decrypt every record from a transcript produced by [`TranscriptWriter`].
///
/// Returns the records oldest-first, together with the verdict of the inner
/// hash-chain check (each record's `prev_hash` must equal the SHA-256 of the
/// preceding ciphertext blob).
pub fn read_transcript(
    password: &str,
    data: &[u8],
) -> Result<(Vec<DecodedRecord>, ChainVerdict), CoreError> {
    let (salt, mut cursor) = parse_header(data)?;
    let key = derive_key(password, salt)?;
    let cipher = ChaCha20Poly1305::new(Key::from_slice(&*key));

    let mut decoded = Vec::new();
    let mut epoch: u64 = 0;
    let mut want_prev: [u8; 32] = [0u8; 32];
    let mut intact = true;

    while !cursor.is_empty() {
        if cursor.len() < 4 {
            return Err(CoreError::Mls("transcript: truncated length prefix".into()));
        }
        let len = u32::from_be_bytes(cursor[..4].try_into().expect("4 bytes")) as usize;
        cursor = &cursor[4..];
        if cursor.len() < len {
            return Err(CoreError::Mls("transcript: truncated record".into()));
        }
        let (ct, tail) = cursor.split_at(len);
        cursor = tail;

        // Nonce is derived from the record index, so deletion/reordering breaks decryption.
        let nonce = epoch_nonce(epoch);
        let pt = cipher
            .decrypt(Nonce::from_slice(&nonce), Payload { msg: ct, aad: b"" })
            .map_err(|_| CoreError::Mls("transcript: decryption failed (wrong password?)".into()))?;
        let rec = decode_record(&pt)?;

        // Chain linkage: this record must point at the previous ciphertext's hash.
        if rec.prev_hash != want_prev {
            intact = false;
        }
        want_prev = Sha256::digest(ct).into();
        epoch += 1;
        decoded.push(rec);
    }

    let verdict = if intact {
        ChainVerdict::Ok { records: epoch }
    } else {
        ChainVerdict::Broken
    };
    Ok((decoded, verdict))
}
/// Structurally verify a transcript file without decrypting its contents.
///
/// Without the password the inner CBOR (including each record's `prev_hash`)
/// cannot be read, so this function only confirms that the header is valid and
/// that every length-prefixed ciphertext blob is fully present. Removal or
/// reordering of records is caught during full decryption instead — the
/// sequential nonces and the inner hash chain are checked by
/// [`read_transcript`], which requires the password.
///
/// Returns `Err` on any parse failure (bad header, truncated record);
/// otherwise `ChainVerdict::Ok` carrying the record count.
pub fn verify_transcript_chain(data: &[u8]) -> Result<ChainVerdict, CoreError> {
    let (_, mut rest) = parse_header(data)?;
    let mut count: u64 = 0;
    // Walk the length-prefixed records, checking only structural integrity.
    // (A previous revision also computed a SHA-256 per blob and tracked an
    // `expected_prev` value, but never compared it — that dead work is removed.)
    while !rest.is_empty() {
        if rest.len() < 4 {
            return Err(CoreError::Mls("transcript: truncated length prefix".into()));
        }
        let len = u32::from_be_bytes(rest[..4].try_into().expect("4 bytes")) as usize;
        rest = &rest[4..];
        if rest.len() < len {
            return Err(CoreError::Mls("transcript: truncated record".into()));
        }
        rest = &rest[len..];
        count += 1;
    }
    Ok(ChainVerdict::Ok { records: count })
}
/// Result of hash-chain verification.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ChainVerdict {
    /// All records are present and the chain is intact.
    /// `records` is the number of records that were examined.
    Ok { records: u64 },
    /// At least one hash in the chain did not match.
    Broken,
}
/// A decrypted and decoded transcript record.
#[derive(Debug, Clone)]
pub struct DecodedRecord {
    /// 0-based record index within the file (also the AEAD nonce source).
    pub epoch: u64,
    /// Sender's Ed25519 public key bytes as stored (may be empty or all zeros).
    pub sender_identity: Vec<u8>,
    /// Application-level message sequence number.
    pub seq: u64,
    /// Wall-clock timestamp in milliseconds since UNIX epoch.
    pub timestamp_ms: u64,
    /// UTF-8 message body.
    pub plaintext: String,
    /// SHA-256 of the previous record's ciphertext (all zeros for epoch 0).
    pub prev_hash: [u8; 32],
}
// ── Internal helpers ─────────────────────────────────────────────────────────
/// Stretch `password` with Argon2id into a 32-byte AEAD key.
///
/// The key is wrapped in `Zeroizing` so it is wiped from memory on drop.
fn derive_key(password: &str, salt: &[u8]) -> Result<Zeroizing<[u8; KEY_LEN]>, CoreError> {
    let params = Params::new(ARGON2_M_COST, ARGON2_T_COST, ARGON2_P_COST, Some(KEY_LEN))
        .map_err(|e| CoreError::Mls(format!("argon2 params: {e}")))?;
    let mut key = Zeroizing::new([0u8; KEY_LEN]);
    Argon2::new(Algorithm::Argon2id, Version::default(), params)
        .hash_password_into(password.as_bytes(), salt, &mut *key)
        .map_err(|e| CoreError::Mls(format!("transcript key derivation: {e}")))?;
    Ok(key)
}
/// Derive the 12-byte AEAD nonce for a record: big-endian `epoch` (8 bytes)
/// followed by four zero bytes.
fn epoch_nonce(epoch: u64) -> [u8; NONCE_LEN] {
    let mut out = [0u8; NONCE_LEN];
    let be = epoch.to_be_bytes();
    out[..be.len()].copy_from_slice(&be);
    out
}
/// Map an I/O error into the crate error type with a transcript-specific prefix.
fn io_err(e: std::io::Error) -> CoreError {
    CoreError::Mls(format!("transcript I/O: {e}"))
}
/// Parse and validate the `QPQT` file header; return `(salt, rest_of_data)`.
fn parse_header(data: &[u8]) -> Result<(&[u8], &[u8]), CoreError> {
    // Header: magic (4) || version (1) || salt (SALT_LEN).
    const HEADER_LEN: usize = 4 + 1 + SALT_LEN;
    if data.len() < HEADER_LEN {
        return Err(CoreError::Mls("transcript: file too short".into()));
    }
    let (magic, tail) = data.split_at(4);
    if magic != MAGIC {
        return Err(CoreError::Mls("transcript: invalid magic bytes".into()));
    }
    let (version, tail) = tail.split_first().expect("length checked above");
    if *version != VERSION {
        return Err(CoreError::Mls(format!(
            "transcript: unsupported version {version}"
        )));
    }
    let (salt, rest) = tail.split_at(SALT_LEN);
    Ok((salt, rest))
}
/// Encode one record as a CBOR map using ciborium.
fn encode_record(
    epoch: u64,
    sender_identity: &[u8],
    seq: u64,
    timestamp_ms: u64,
    plaintext: &str,
    prev_hash: &[u8; 32],
) -> Result<Vec<u8>, CoreError> {
    use ciborium::value::Value;
    // Small helper to build a text-keyed map entry.
    let entry = |k: &str, v: Value| (Value::Text(k.into()), v);
    let record = Value::Map(vec![
        entry("epoch", Value::Integer(epoch.into())),
        entry("sender_identity", Value::Bytes(sender_identity.to_vec())),
        entry("seq", Value::Integer(seq.into())),
        entry("timestamp_ms", Value::Integer(timestamp_ms.into())),
        entry("plaintext", Value::Text(plaintext.into())),
        entry("prev_hash", Value::Bytes(prev_hash.to_vec())),
    ]);
    let mut encoded = Vec::new();
    ciborium::into_writer(&record, &mut encoded)
        .map_err(|e| CoreError::Mls(format!("transcript CBOR encode: {e}")))?;
    Ok(encoded)
}
/// Decode a CBOR record produced by `encode_record` back into a [`DecodedRecord`].
fn decode_record(data: &[u8]) -> Result<DecodedRecord, CoreError> {
    use ciborium::value::Value;
    let parsed: Value = ciborium::from_reader(data)
        .map_err(|e| CoreError::Mls(format!("transcript CBOR decode: {e}")))?;
    let entries = match parsed {
        Value::Map(m) => m,
        _ => return Err(CoreError::Mls("transcript: record is not a CBOR map".into())),
    };

    let mut epoch_field = None::<u64>;
    let mut sender = Vec::new();
    let mut seq_field = None::<u64>;
    let mut ts_field = None::<u64>;
    let mut body = None::<String>;
    let mut prev = None::<Vec<u8>>;

    for (key, val) in entries {
        let name = match key {
            Value::Text(s) => s,
            _ => continue, // non-text keys are ignored
        };
        match name.as_str() {
            "epoch" => epoch_field = integer_as_u64(val),
            "sender_identity" => {
                if let Value::Bytes(b) = val {
                    sender = b;
                }
            }
            "seq" => seq_field = integer_as_u64(val),
            "timestamp_ms" => ts_field = integer_as_u64(val),
            "plaintext" => {
                if let Value::Text(s) = val {
                    body = Some(s);
                }
            }
            "prev_hash" => {
                if let Value::Bytes(b) = val {
                    prev = Some(b);
                }
            }
            _ => {} // unknown fields are skipped
        }
    }

    let epoch = epoch_field.ok_or_else(|| CoreError::Mls("transcript: missing epoch".into()))?;
    let seq = seq_field.ok_or_else(|| CoreError::Mls("transcript: missing seq".into()))?;
    let timestamp_ms =
        ts_field.ok_or_else(|| CoreError::Mls("transcript: missing timestamp_ms".into()))?;
    let plaintext = body.ok_or_else(|| CoreError::Mls("transcript: missing plaintext".into()))?;
    let prev_bytes = prev.ok_or_else(|| CoreError::Mls("transcript: missing prev_hash".into()))?;
    let prev_hash: [u8; 32] = prev_bytes
        .as_slice()
        .try_into()
        .map_err(|_| CoreError::Mls("transcript: prev_hash must be 32 bytes".into()))?;

    Ok(DecodedRecord {
        epoch,
        sender_identity: sender,
        seq,
        timestamp_ms,
        plaintext,
        prev_hash,
    })
}
/// Extract a CBOR integer as `u64`; negative or non-integer values yield `None`.
fn integer_as_u64(v: ciborium::value::Value) -> Option<u64> {
    use ciborium::value::Value;
    if let Value::Integer(i) = v {
        let wide: i128 = i.into();
        u64::try_from(wide).ok()
    } else {
        None
    }
}
// ── Tests ────────────────────────────────────────────────────────────────────
#[cfg(test)]
mod tests {
    use super::*;

    /// A transcript with zero records still round-trips (header only).
    #[test]
    fn round_trip_empty() {
        let mut file = Vec::new();
        let _writer = TranscriptWriter::new("test-password", &mut file).expect("new writer");
        let (records, verdict) = read_transcript("test-password", &file).expect("read");
        assert!(records.is_empty());
        assert_eq!(verdict, ChainVerdict::Ok { records: 0 });
    }

    /// Several records survive encrypt → decrypt with order, bodies, and epochs intact.
    #[test]
    fn round_trip_records() {
        let password = "hunter2";
        let mut file = Vec::new();
        let mut tw = TranscriptWriter::new(password, &mut file).expect("new writer");
        let messages: &[(&str, u64, &str)] = &[
            ("alice", 1000, "Hello"),
            ("bob", 2000, "Hi there"),
            ("alice", 3000, "How are you?"),
        ];
        let zero_key = [0u8; 32];
        for &(_sender, ts, body) in messages {
            tw.write_record(
                &TranscriptRecord {
                    seq: ts / 1000,
                    sender_identity: &zero_key,
                    timestamp_ms: ts,
                    plaintext: body,
                },
                &mut file,
            )
            .expect("write record");
        }
        let (records, verdict) = read_transcript(password, &file).expect("read");
        assert_eq!(verdict, ChainVerdict::Ok { records: 3 });
        assert_eq!(records.len(), 3);
        for (idx, (rec, &(_, _, body))) in records.iter().zip(messages).enumerate() {
            assert_eq!(rec.plaintext, body);
            assert_eq!(rec.epoch, idx as u64);
        }
    }

    /// Decryption must fail outright under the wrong password.
    #[test]
    fn wrong_password_fails() {
        let mut file = Vec::new();
        let mut tw = TranscriptWriter::new("correct", &mut file).expect("new writer");
        let zero_key = [0u8; 32];
        tw.write_record(
            &TranscriptRecord {
                seq: 0,
                sender_identity: &zero_key,
                timestamp_ms: 0,
                plaintext: "secret",
            },
            &mut file,
        )
        .expect("write");
        assert!(
            read_transcript("wrong-password", &file).is_err(),
            "wrong password should fail decryption"
        );
    }

    /// Structural verification succeeds on an untouched multi-record file.
    #[test]
    fn chain_verify_valid() {
        let mut file = Vec::new();
        let mut tw = TranscriptWriter::new("pw", &mut file).expect("new writer");
        let zero_key = [0u8; 32];
        for i in 0..5u64 {
            tw.write_record(
                &TranscriptRecord {
                    seq: i,
                    sender_identity: &zero_key,
                    timestamp_ms: i * 1000,
                    plaintext: "msg",
                },
                &mut file,
            )
            .expect("write");
        }
        assert_eq!(
            verify_transcript_chain(&file).expect("verify"),
            ChainVerdict::Ok { records: 5 }
        );
    }

    /// Chopping bytes off the end must surface as a parse error.
    #[test]
    fn chain_verify_truncated_record_detected() {
        let mut file = Vec::new();
        let mut tw = TranscriptWriter::new("pw", &mut file).expect("new writer");
        let zero_key = [0u8; 32];
        tw.write_record(
            &TranscriptRecord {
                seq: 0,
                sender_identity: &zero_key,
                timestamp_ms: 0,
                plaintext: "first",
            },
            &mut file,
        )
        .expect("write");
        let truncated = &file[..file.len() - 5];
        assert!(
            verify_transcript_chain(truncated).is_err(),
            "truncated file must be detected"
        );
    }
}
}