Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
67 changes: 67 additions & 0 deletions crates/mtc_api/src/landmark.rs
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,49 @@ impl LandmarkSequence {
landmarks,
})
}

/// Iterate over the sequence of subtrees determined by the landmark sequence.
pub fn subtrees(&self) -> LandmarkSubtreesIterator<'_> {
    // Begin at index 1 so the iterator can pair each landmark with its
    // predecessor; no subtree is buffered before the first call to `next()`.
    LandmarkSubtreesIterator {
        landmarks: &self.landmarks,
        next_subtree: None,
        index: 1,
    }
}
}

/// An iterator over the subtrees determined by the landmark sequence.
pub struct LandmarkSubtreesIterator<'a> {
    // Index of the landmark forming the right edge of the interval to split
    // next; starts at 1 so the first interval is landmarks[0]..landmarks[1].
    index: usize,
    // Landmark boundaries borrowed from the owning `LandmarkSequence`.
    landmarks: &'a VecDeque<u64>,
    // When splitting an interval yields two subtrees, the second is buffered
    // here and returned by the following call to `next()`.
    next_subtree: Option<Subtree>,
}

impl Iterator for LandmarkSubtreesIterator<'_> {
    type Item = Subtree;

    /// Yields the subtrees covering each adjacent pair of landmarks, in order.
    ///
    /// Splitting the interval between two landmarks produces one subtree and
    /// optionally a second; the second is buffered in `next_subtree` and
    /// yielded on the following call.
    fn next(&mut self) -> Option<Subtree> {
        // Fewer than two landmarks means there is no interval to cover.
        if self.landmarks.len() < 2 {
            return None;
        }

        // Drain the buffered second half of the previously split interval.
        // `take()` already leaves `None` behind, so no explicit reset is
        // needed (the original re-assigned `None` redundantly).
        if let Some(subtree) = self.next_subtree.take() {
            return Some(subtree);
        }

        // Every landmark interval has been consumed.
        if self.index == self.landmarks.len() {
            return None;
        }

        // Split the next interval; buffer the optional second subtree for the
        // subsequent call.
        let (subtree, rest) =
            Subtree::split_interval(self.landmarks[self.index - 1], self.landmarks[self.index])
                .unwrap();
        self.next_subtree = rest;

        self.index += 1;
        Some(subtree)
    }
}

#[cfg(test)]
Expand Down Expand Up @@ -233,4 +276,28 @@ mod tests {
// Past last landmark.
assert!(seq.subtree_for_index(200).is_none());
}

#[test]
fn test_subtrees() {
    let mut seq = LandmarkSequence::create(10);
    // With fewer than two landmarks there are no subtrees at all.
    assert!(seq.subtrees().next().is_none());

    for i in 1..=5 {
        seq.add(i * 10).unwrap();
    }

    // Expected subtree boundaries after adding landmarks 10, 20, 30, 40, 50.
    let expected: Vec<Subtree> = [
        (0, 8),
        (8, 10),
        (8, 16),
        (16, 20),
        (20, 24),
        (24, 30),
        (30, 32),
        (32, 40),
        (40, 48),
        (48, 50),
    ]
    .into_iter()
    .map(|(lo, hi)| Subtree::new(lo, hi).unwrap())
    .collect();
    assert_eq!(seq.subtrees().collect::<Vec<_>>(), expected);
}
}
145 changes: 114 additions & 31 deletions crates/mtc_worker/src/frontend_worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@ use der::{
use generic_log_worker::{
batcher_id_from_lookup_key, deserialize, get_cached_metadata, get_durable_object_stub,
init_logging, load_cache_kv, load_public_bucket,
log_ops::{prove_subtree_inclusion, read_leaf, ProofError, CHECKPOINT_KEY},
log_ops::{
prove_subtree_consistency, prove_subtree_inclusion, read_leaf, ProofError, CHECKPOINT_KEY,
},
put_cache_entry_metadata, serialize,
util::now_millis,
ObjectBackend, ObjectBucket, ENTRY_ENDPOINT, METRICS_ENDPOINT,
Expand All @@ -28,7 +30,9 @@ use serde::{Deserialize, Serialize};
use serde_with::{base64::Base64, serde_as};
use signed_note::{NoteVerifier, VerifierList};
use std::time::Duration;
use tlog_tiles::{open_checkpoint, LeafIndex, PendingLogEntry, PendingLogEntryBlob};
use tlog_tiles::{
open_checkpoint, CheckpointText, LeafIndex, PendingLogEntry, PendingLogEntryBlob,
};
#[allow(clippy::wildcard_imports)]
use worker::*;
use x509_cert::{
Expand Down Expand Up @@ -71,6 +75,24 @@ pub struct GetCertificateResponse {
pub landmark_id: usize,
}

/// GET response structure for the `/get-landmark-bundle` endpoint
#[serde_as]
#[derive(Serialize, Deserialize)]
pub struct GetLandmarkBundleResponse {
    /// The log's current serialized checkpoint, base64-encoded in JSON.
    #[serde_as(as = "Base64")]
    pub checkpoint: Vec<u8>,
    /// The landmark subtrees, each paired with a proof of consistency
    /// against the checkpoint in this bundle.
    pub subtrees: Vec<SubtreeWithConsistencyProof>,
}

/// A landmark subtree together with its consistency proof.
#[serde_as]
#[derive(Serialize, Deserialize)]
pub struct SubtreeWithConsistencyProof {
    /// Lower boundary of the subtree (the subtree's `lo()` value).
    pub start: u64,
    /// Upper boundary of the subtree (the subtree's `hi()` value).
    pub end: u64,
    /// Consistency proof hashes, each base64-encoded in JSON.
    #[serde_as(as = "Vec<Base64>")]
    pub consistency_proof: Vec<Vec<u8>>,
}

/// Start is the first code run when the Wasm module is loaded.
#[event(start)]
fn start() {
Expand Down Expand Up @@ -128,39 +150,13 @@ async fn main(req: Request, env: Env, _ctx: Context) -> Result<Response> {
let object_backend = ObjectBucket::new(load_public_bucket(&ctx.env, name)?);
// Fetch the current checkpoint to know which tiles to fetch
// (full or partials).
let checkpoint_bytes = object_backend
.fetch(CHECKPOINT_KEY)
.await?
.ok_or("no checkpoint in object storage".to_string())?;
let origin = &load_origin(name);
let verifiers = &VerifierList::new(
load_checkpoint_signers(&ctx.env, name)
.iter()
.map(|s| s.verifier())
.collect::<Vec<Box<dyn NoteVerifier>>>(),
);
let checkpoint = open_checkpoint(
origin.as_str(),
verifiers,
now_millis(),
&checkpoint_bytes,
)
.map_err(|e| e.to_string())?
.0;
let (checkpoint, _checkpoint_bytes) =
get_current_checkpoint(&ctx.env, name, &object_backend).await?;
if leaf_index >= checkpoint.size() {
return Response::error("Leaf index is not in log", 422);
}

let seq = if let Some(bytes) = object_backend.fetch(LANDMARK_KEY).await? {
let max_landmarks = params
.max_certificate_lifetime_secs
.div_ceil(params.landmark_interval_secs)
+ 1;
LandmarkSequence::from_bytes(&bytes, max_landmarks)
.map_err(|e| e.to_string())?
} else {
return Err("failed to get landmark sequence".into());
};
let seq = get_landmark_sequence(name, &object_backend).await?;
if leaf_index < seq.first_index() {
return Response::error("Leaf index is before first active landmark", 422);
}
Expand Down Expand Up @@ -223,6 +219,9 @@ async fn main(req: Request, env: Env, _ctx: Context) -> Result<Response> {

Response::from_json(&GetCertificateResponse { data, landmark_id })
})
.get_async("/logs/:log/get-landmark-bundle", |_req, ctx| async move {
get_landmark_bundle(&ctx.env, ctx.data).await
})
.get("/logs/:log/metadata", |_req, ctx| {
let name = ctx.data;
let params = &CONFIG.logs[name];
Expand Down Expand Up @@ -442,6 +441,90 @@ async fn add_entry(mut req: Request, env: &Env, name: &str) -> Result<Response>
})
}

/// Handles `GET /logs/:log/get-landmark-bundle`: returns the log's current
/// checkpoint together with, for every landmark subtree, a proof that the
/// subtree is consistent with that checkpoint.
async fn get_landmark_bundle(env: &Env, name: &str) -> Result<Response> {
    let backend = ObjectBucket::new(load_public_bucket(env, name)?);

    // Current verified checkpoint plus its raw serialized bytes.
    let (checkpoint, checkpoint_bytes) = get_current_checkpoint(env, name, &backend).await?;

    // Active landmark sequence for this log.
    let landmarks = get_landmark_sequence(name, &backend).await?;

    // Each signatureless MTC includes an inclusion proof in one of these
    // subtrees, so prove each subtree consistent with the checkpoint.
    let mut subtrees = Vec::new();
    for subtree in landmarks.subtrees() {
        let proof_result = prove_subtree_consistency(
            *checkpoint.hash(),
            checkpoint.size(),
            subtree.lo(),
            subtree.hi(),
            &backend,
        )
        .await;

        let proof = match proof_result {
            Ok(p) => p,
            // Tlog-level failures are reported to the client as unprocessable.
            Err(ProofError::Tlog(s)) => return Response::error(s.to_string(), 422),
            // Anything else is an internal error.
            Err(ProofError::Other(e)) => return Err(e.to_string().into()),
        };

        subtrees.push(SubtreeWithConsistencyProof {
            start: subtree.lo(),
            end: subtree.hi(),
            consistency_proof: proof.iter().map(|h| h.0.to_vec()).collect(),
        });
    }

    Response::from_json(&GetLandmarkBundleResponse {
        checkpoint: checkpoint_bytes,
        subtrees,
    })
}

/// Fetches the current checkpoint from object storage, verifies its
/// signatures, and returns it both parsed and as the raw stored bytes.
async fn get_current_checkpoint(
    env: &Env,
    name: &str,
    object_backend: &ObjectBucket,
) -> Result<(CheckpointText, Vec<u8>)> {
    // Raw serialized checkpoint as stored in the public bucket.
    let raw = object_backend
        .fetch(CHECKPOINT_KEY)
        .await?
        .ok_or("no checkpoint in object storage".to_string())?;

    // Build the verifier set from every configured checkpoint signer.
    let signers = load_checkpoint_signers(env, name);
    let boxed: Vec<Box<dyn NoteVerifier>> = signers.iter().map(|s| s.verifier()).collect();
    let verifiers = VerifierList::new(boxed);

    let origin = load_origin(name);
    let (checkpoint, _timestamp) =
        open_checkpoint(origin.as_str(), &verifiers, now_millis(), &raw)
            .map_err(|e| e.to_string())?;

    Ok((checkpoint, raw))
}

/// Loads and decodes the landmark sequence for the named log from object
/// storage.
async fn get_landmark_sequence(
    name: &str,
    object_backend: &ObjectBucket,
) -> Result<LandmarkSequence> {
    let params = &CONFIG.logs[name];

    // Maximum number of landmarks that can be active at once: one per
    // landmark interval over a certificate's maximum lifetime, plus one.
    let max_landmarks = params
        .max_certificate_lifetime_secs
        .div_ceil(params.landmark_interval_secs)
        + 1;

    match object_backend.fetch(LANDMARK_KEY).await? {
        Some(bytes) => {
            let seq = LandmarkSequence::from_bytes(&bytes, max_landmarks)
                .map_err(|e| e.to_string())?;
            Ok(seq)
        }
        None => Err("failed to get landmark sequence".into()),
    }
}

fn headers_from_http_metadata(meta: HttpMetadata) -> Headers {
let h = Headers::new();
if let Some(hdr) = meta.cache_control {
Expand Down