@@ -3,15 +3,20 @@

 //! Sequencer is the 'brain' of the CT log, responsible for sequencing entries and maintaining log state.

-use std::{future::Future, pin::Pin, time::Duration};
+use std::{collections::VecDeque, time::Duration};

 use crate::{load_checkpoint_cosigner, load_origin, CONFIG};
 use generic_log_worker::{
-    get_durable_object_name, load_public_bucket, CheckpointCallbacker, GenericSequencer,
-    SequencerConfig, SEQUENCER_BINDING,
+    get_durable_object_name, load_public_bucket,
+    log_ops::{prove_subtree_consistency, ProofError},
+    CachedRoObjectBucket, CheckpointCallbacker, GenericSequencer, ObjectBucket, SequencerConfig,
+    SEQUENCER_BINDING,
 };
-use mtc_api::{BootstrapMtcLogEntry, LandmarkSequence, LANDMARK_KEY};
-use tlog_tiles::UnixTimestamp;
+use mtc_api::{BootstrapMtcLogEntry, LandmarkSequence, LANDMARK_BUNDLE_KEY, LANDMARK_KEY};
+use serde::{Deserialize, Serialize};
+use serde_with::{base64::Base64, serde_as};
+use signed_note::Note;
+use tlog_tiles::{CheckpointText, Hash, UnixTimestamp};
 #[allow(clippy::wildcard_imports)]
 use worker::*;

@@ -53,14 +58,47 @@ impl DurableObject for Sequencer {
     }
 }

+#[serde_as]
+#[derive(Serialize, Deserialize)]
+pub struct SubtreeWithConsistencyProof {
+    #[serde_as(as = "Base64")]
+    pub hash: [u8; 32],
+    #[serde_as(as = "Vec<Base64>")]
+    pub consistency_proof: Vec<[u8; 32]>,
+}
+
+/// GET response structure for the `/get-landmark-bundle` endpoint.
+#[derive(Serialize, Deserialize)]
+pub struct LandmarkBundle {
+    pub checkpoint: String,
+    pub subtrees: Vec<SubtreeWithConsistencyProof>,
+    pub landmarks: VecDeque<u64>,
+}
+
 /// Return a callback function that gets passed into the generic sequencer and
 /// called each time a new checkpoint is created. For MTC, this is used to
 /// periodically update the landmark checkpoint sequence.
 fn checkpoint_callback(env: &Env, name: &str) -> CheckpointCallbacker {
     let params = &CONFIG.logs[name];
     let bucket = load_public_bucket(env, name).unwrap();
     Box::new(
-        move |tree_size: u64, old_time: UnixTimestamp, new_time: UnixTimestamp| {
+        move |old_time: UnixTimestamp, new_time: UnixTimestamp, new_checkpoint_bytes: &[u8]| {
+            let new_checkpoint = {
+                // TODO: Make this more efficient. There are two unnecessary allocations here.
+
+                // We can unwrap because the checkpoint provided is the checkpoint that the
+                // sequencer just created, so it must be well formed.
+                let note = Note::from_bytes(new_checkpoint_bytes)
+                    .expect("freshly created checkpoint is not a note");
+                CheckpointText::from_bytes(note.text())
+                    .expect("freshly created checkpoint is not a checkpoint")
+            };
+            let tree_size = new_checkpoint.size();
+            let root_hash = *new_checkpoint.hash();
+            // We can unwrap here for the same reason as above.
+            let new_checkpoint_str = String::from_utf8(new_checkpoint_bytes.to_vec())
+                .expect("freshly created checkpoint is not UTF-8");
+
             Box::pin({
                 // We have to clone each time since the bucket gets moved into
                 // the async function.
@@ -100,9 +138,63 @@ fn checkpoint_callback(env: &Env, name: &str) -> CheckpointCallbacker {
                             .execute()
                             .await?;
                     }
+
+                    // Compute the landmark bundle and save it.
+                    let subtrees =
+                        get_landmark_subtrees(&seq, root_hash, tree_size, bucket_clone.clone())
+                            .await?;
+                    let bundle = LandmarkBundle {
+                        checkpoint: new_checkpoint_str,
+                        subtrees,
+                        landmarks: seq.landmarks,
+                    };
+                    // TODO: This put should happen atomically with the put operation above.
+                    // Otherwise an error here might put us in a state where the landmark bundle is
+                    // out of sync with the landmark sequence. We need an all-or-nothing multi-put
+                    // operation. Tracking issue: https://github.com/cloudflare/workers-rs/issues/876
+                    bucket_clone
+                        // We can unwrap here because we use the auto-derived Serialize impl for LandmarkBundle.
+                        .put(LANDMARK_BUNDLE_KEY, serde_json::to_vec(&bundle).unwrap())
+                        .execute()
+                        .await?;
+
                     Ok(())
                 }
-            }) as Pin<Box<dyn Future<Output = Result<()>>>>
+            })
         },
     )
 }
+
+/// Computes the sequence of landmark subtrees and, for each subtree, a proof of consistency with
+/// the checkpoint. Each signatureless MTC includes an inclusion proof in one of these subtrees.
+async fn get_landmark_subtrees(
+    landmark_sequence: &LandmarkSequence,
+    checkpoint_hash: Hash,
+    checkpoint_size: u64,
+    bucket: Bucket,
+) -> Result<Vec<SubtreeWithConsistencyProof>> {
+    let cached_object_backend = CachedRoObjectBucket::new(ObjectBucket::new(bucket));
+    let mut subtrees = Vec::new();
+    for landmark_subtree in landmark_sequence.subtrees() {
+        let (consistency_proof, landmark_subtree_hash) = match prove_subtree_consistency(
+            checkpoint_hash,
+            checkpoint_size,
+            landmark_subtree.lo(),
+            landmark_subtree.hi(),
+            &cached_object_backend,
+        )
+        .await
+        {
+            Ok(p) => p,
+            Err(ProofError::Tlog(s)) => return Err(s.to_string().into()),
+            Err(ProofError::Other(e)) => return Err(e.to_string().into()),
+        };
+
+        subtrees.push(SubtreeWithConsistencyProof {
+            hash: landmark_subtree_hash.0,
+            consistency_proof: consistency_proof.iter().map(|h| h.0).collect(),
+        });
+    }
+
+    Ok(subtrees)
+}
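
For context, here is a minimal sketch of how a client might read the published bundle back after fetching the object stored under `LANDMARK_BUNDLE_KEY`. The mirrored struct definitions and the `parse_landmark_bundle` helper are illustrative only and not part of this change; a real client would import the types exported by this crate rather than redefining them.

```rust
use std::collections::VecDeque;

use serde::{Deserialize, Serialize};
use serde_with::{base64::Base64, serde_as};

// Mirrors the structs added in this commit, for illustration only.
#[serde_as]
#[derive(Serialize, Deserialize)]
struct SubtreeWithConsistencyProof {
    // 32-byte subtree root hash, base64-encoded on the wire.
    #[serde_as(as = "Base64")]
    hash: [u8; 32],
    // Hashes proving the subtree is consistent with the checkpoint root.
    #[serde_as(as = "Vec<Base64>")]
    consistency_proof: Vec<[u8; 32]>,
}

#[derive(Serialize, Deserialize)]
struct LandmarkBundle {
    // The checkpoint note, verbatim.
    checkpoint: String,
    // One entry per landmark subtree, in sequence order.
    subtrees: Vec<SubtreeWithConsistencyProof>,
    landmarks: VecDeque<u64>,
}

// The sequencer stores the bundle with `serde_json::to_vec`, so a plain
// serde_json parse is enough to read it back.
fn parse_landmark_bundle(body: &[u8]) -> Result<LandmarkBundle, serde_json::Error> {
    serde_json::from_slice(body)
}
```

Because the hashes are base64 strings in the JSON, the bundle stays human-readable while each field still decodes to a fixed 32-byte value.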