Skip to content

Commit 5672029

Browse files
rozbb (Michael Rosenberg) and a co-author authored
Add proof generation to sequencer (#52)
* Add inclusion and consistency proof functionality to sequencer
* Remove check that checkpoint extensions are empty
* Move generic_log_worker/src/ctlog.rs to log_ops.rs

Co-authored-by: Michael Rosenberg <mrosenberg@cloudflare.com>
1 parent 264564a commit 5672029

File tree

5 files changed

+100
-25
lines changed

5 files changed

+100
-25
lines changed

crates/ct_worker/src/frontend_worker.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,9 @@ use crate::{load_signing_key, load_witness_key, LookupKey, SequenceMetadata, CON
99
use config::TemporalInterval;
1010
use futures_util::future::try_join_all;
1111
use generic_log_worker::{
12-
ctlog::UploadOptions, get_cached_metadata, get_durable_object_stub, init_logging,
13-
load_cache_kv, load_public_bucket, put_cache_entry_metadata, ObjectBackend, ObjectBucket,
14-
ENTRY_ENDPOINT, METRICS_ENDPOINT,
12+
get_cached_metadata, get_durable_object_stub, init_logging, load_cache_kv, load_public_bucket,
13+
log_ops::UploadOptions, put_cache_entry_metadata, ObjectBackend, ObjectBucket, ENTRY_ENDPOINT,
14+
METRICS_ENDPOINT,
1515
};
1616
use log::{debug, info, warn};
1717
use p256::pkcs8::EncodePublicKey;

crates/generic_log_worker/src/lib.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use base64::prelude::BASE64_STANDARD;
55
use base64::Engine;
66

77
pub mod batcher_do;
8-
pub mod ctlog;
8+
pub mod log_ops;
99
mod metrics;
1010
pub mod sequencer_do;
1111
pub mod util;
@@ -14,8 +14,8 @@ pub use batcher_do::*;
1414
pub use sequencer_do::*;
1515

1616
use byteorder::{BigEndian, WriteBytesExt};
17-
use ctlog::UploadOptions;
1817
use log::Level;
18+
use log_ops::UploadOptions;
1919
use metrics::{millis_diff_as_secs, AsF64, ObjectMetrics};
2020
use serde_bytes::ByteBuf;
2121
use sha2::{Digest, Sha256};
Lines changed: 56 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ use crate::{
2424
CacheRead, CacheWrite, LockBackend, LookupKey, ObjectBackend, SequenceMetadata,
2525
SequencerConfig,
2626
};
27-
use anyhow::{anyhow, bail, ensure};
27+
use anyhow::{anyhow, bail};
2828
use futures_util::future::try_join_all;
2929
use log::{debug, error, info, trace, warn};
3030
use serde::{Deserialize, Serialize};
@@ -36,8 +36,8 @@ use std::{
3636
};
3737
use thiserror::Error;
3838
use tlog_tiles::{
39-
Hash, HashReader, LogEntry, PendingLogEntry, Tile, TileIterator, TlogError, TlogTile,
40-
TreeWithTimestamp, UnixTimestamp, HASH_SIZE,
39+
tlog, Hash, HashReader, LogEntry, PendingLogEntry, RecordProof, Tile, TileIterator, TlogError,
40+
TlogTile, TreeProof, TreeWithTimestamp, UnixTimestamp, HASH_SIZE,
4141
};
4242
use tokio::sync::watch::{channel, Receiver, Sender};
4343

@@ -319,11 +319,6 @@ impl SequenceState {
319319
now_millis(),
320320
&stored_checkpoint,
321321
)?;
322-
// We don't use extension lines for any of our checkpoints
323-
ensure!(
324-
c.extension().is_empty(),
325-
"unexpected extension in DO checkpoint"
326-
);
327322

328323
let timestamp = match timestamp {
329324
Some(timestamp) => timestamp,
@@ -344,11 +339,6 @@ impl SequenceState {
344339
std::str::from_utf8(&stored_checkpoint)?
345340
);
346341
let (c1, _) = tlog_tiles::open_checkpoint(&config.origin, &verifiers, now_millis(), &sth)?;
347-
// We don't use extension lines for any of our checkpoints
348-
ensure!(
349-
c1.extension().is_empty(),
350-
"unexpected extension in R2 checkpoint"
351-
);
352342

353343
match (Ord::cmp(&c1.size(), &c.size()), c1.hash() == c.hash()) {
354344
(Ordering::Equal, false) => {
@@ -453,6 +443,33 @@ impl SequenceState {
453443
checkpoint: stored_checkpoint,
454444
})
455445
}
446+
447+
/// Proves inclusion of the last leaf in the current tree
448+
pub(crate) fn prove_inclusion_of_last_elem(&self) -> RecordProof {
449+
let tree_size = self.tree.size();
450+
let reader = HashReaderWithOverlay {
451+
edge_tiles: &self.edge_tiles,
452+
overlay: &HashMap::default(),
453+
};
454+
// We can unwrap because edge_tiles is guaranteed to contain the tiles
455+
// necessary to prove this
456+
tlog::prove_record(tree_size, tree_size - 1, &reader).unwrap()
457+
}
458+
459+
/// Proves that this tree of size n is compatible with the subtree of size
460+
/// n-1. In other words, prove that we appended 1 element to the tree.
461+
///
462+
/// # Errors
463+
/// Errors when the last tree was size 0. We cannot prove consistency with
464+
/// respect to an empty tree
465+
pub(crate) fn prove_consistency_of_single_append(&self) -> Result<TreeProof, TlogError> {
466+
let tree_size = self.tree.size();
467+
let reader = HashReaderWithOverlay {
468+
edge_tiles: &self.edge_tiles,
469+
overlay: &HashMap::default(),
470+
};
471+
tlog::prove_tree(tree_size, tree_size - 1, &reader)
472+
}
456473
}
457474

458475
/// Result of an [`add_leaf_to_pool`] request containing either a cached log
@@ -1225,6 +1242,20 @@ mod tests {
12251242
let (leaf_index, _) = block_on(res.resolve()).unwrap();
12261243
assert_eq!(leaf_index, i);
12271244
log.check(i + 1).unwrap();
1245+
1246+
// Check we can make proofs
1247+
log.sequence_state
1248+
.as_ref()
1249+
.unwrap()
1250+
.prove_inclusion_of_last_elem();
1251+
// Can't prove consistency with a size-0 subtree
1252+
if i > 0 {
1253+
log.sequence_state
1254+
.as_ref()
1255+
.unwrap()
1256+
.prove_consistency_of_single_append()
1257+
.unwrap();
1258+
}
12281259
}
12291260
log.check(n).unwrap();
12301261
}
@@ -1251,6 +1282,18 @@ mod tests {
12511282
add_leaf_to_pool(&mut log.pool_state, &log.cache, &log.config, leaf);
12521283
}
12531284
log.sequence().unwrap();
1285+
1286+
// Check we can make proofs
1287+
log.sequence_state
1288+
.as_ref()
1289+
.unwrap()
1290+
.prove_inclusion_of_last_elem();
1291+
// We're batch-adding, so there's no chance tree_size - 1 is 0
1292+
log.sequence_state
1293+
.as_ref()
1294+
.unwrap()
1295+
.prove_consistency_of_single_append()
1296+
.unwrap();
12541297
}
12551298
log.check(5 + 500 * 3000).unwrap();
12561299
}

crates/generic_log_worker/src/sequencer_do.rs

Lines changed: 37 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
use std::time::Duration;
77

88
use crate::{
9-
ctlog::{self, CreateError, PoolState, SequenceState},
9+
log_ops::{self, CreateError, PoolState, SequenceState},
1010
metrics::{millis_diff_as_secs, ObjectMetrics, SequencerMetrics},
1111
util::now_millis,
1212
DedupCache, LookupKey, MemoryCache, ObjectBucket, SequenceMetadata, BATCH_ENDPOINT,
@@ -15,7 +15,7 @@ use crate::{
1515
use futures_util::future::join_all;
1616
use log::{info, warn};
1717
use prometheus::{Registry, TextEncoder};
18-
use tlog_tiles::{CheckpointSigner, LogEntry, PendingLogEntry};
18+
use tlog_tiles::{CheckpointSigner, LogEntry, PendingLogEntry, RecordProof};
1919
use tokio::sync::Mutex;
2020
use worker::{Bucket, Error as WorkerError, Request, Response, State};
2121

@@ -141,7 +141,7 @@ impl<E: PendingLogEntry> GenericSequencer<E> {
141141
.set_alarm(self.config.sequence_interval)
142142
.await?;
143143

144-
if let Err(e) = ctlog::sequence::<L>(
144+
if let Err(e) = log_ops::sequence::<L>(
145145
&mut self.pool_state,
146146
&mut self.sequence_state,
147147
&self.config,
@@ -174,7 +174,7 @@ impl<E: PendingLogEntry> GenericSequencer<E> {
174174
warn!("Failed to load short-term dedup cache from DO storage: {e}");
175175
};
176176

177-
match ctlog::create_log(&self.config, &self.public_bucket, &self.do_state).await {
177+
match log_ops::create_log(&self.config, &self.public_bucket, &self.do_state).await {
178178
Err(CreateError::LogExists) => info!("{name}: Log exists, not creating"),
179179
Err(CreateError::Other(msg)) => {
180180
return Err(format!("{name}: failed to create: {msg}").into())
@@ -203,7 +203,7 @@ impl<E: PendingLogEntry> GenericSequencer<E> {
203203
for pending_entry in pending_entries {
204204
lookup_keys.push(pending_entry.lookup_key());
205205

206-
let add_leaf_result = ctlog::add_leaf_to_pool(
206+
let add_leaf_result = log_ops::add_leaf_to_pool(
207207
&mut self.pool_state,
208208
&self.cache,
209209
&self.config,
@@ -228,6 +228,38 @@ impl<E: PendingLogEntry> GenericSequencer<E> {
228228
.collect::<Vec<_>>()
229229
}
230230

231+
/// Proves inclusion of the last leaf in the current tree. This may only be
232+
/// called after the sequencer state has been loaded, i.e., after the first
233+
/// `alarm()` has triggered.
234+
///
235+
/// # Errors
236+
/// Errors when sequencer state has not been loaded
237+
pub fn prove_inclusion_of_last_elem(&self) -> Result<RecordProof, WorkerError> {
238+
if let Some(s) = self.sequence_state.as_ref() {
239+
Ok(s.prove_inclusion_of_last_elem())
240+
} else {
241+
Err(WorkerError::RustError(
242+
"cannot prove inclusion in a sequencer with no sequence state".to_string(),
243+
))
244+
}
245+
}
246+
247+
/// Proves that this tree of size n is compatible with the subtree of size
248+
/// n-1. In other words, prove that we appended 1 element to the tree.
249+
///
250+
/// # Errors
251+
/// Errors when this sequencer has not been used to sequence anything yet.
252+
pub fn prove_consistency_of_single_append(&self) -> Result<RecordProof, WorkerError> {
253+
if let Some(s) = self.sequence_state.as_ref() {
254+
s.prove_consistency_of_single_append()
255+
.map_err(|e| WorkerError::RustError(e.to_string()))
256+
} else {
257+
Err(WorkerError::RustError(
258+
"cannot prove inclusion in a sequencer with no sequence state".to_string(),
259+
))
260+
}
261+
}
262+
231263
fn fetch_metrics(&self) -> Result<Response, WorkerError> {
232264
let mut buffer = String::new();
233265
let encoder = TextEncoder::new();

crates/mtc_worker/src/frontend_worker.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@ use std::{str::FromStr, sync::LazyLock, time::Duration};
88
use crate::{load_signing_key, load_witness_key, LookupKey, SequenceMetadata, CONFIG, ROOTS};
99
use futures_util::future::try_join_all;
1010
use generic_log_worker::{
11-
ctlog::UploadOptions, get_cached_metadata, get_durable_object_stub, init_logging,
12-
load_cache_kv, load_public_bucket, put_cache_entry_metadata, util::now_millis, ObjectBackend,
11+
get_cached_metadata, get_durable_object_stub, init_logging, load_cache_kv, load_public_bucket,
12+
log_ops::UploadOptions, put_cache_entry_metadata, util::now_millis, ObjectBackend,
1313
ObjectBucket, ENTRY_ENDPOINT, METRICS_ENDPOINT,
1414
};
1515
use log::{debug, info, warn};

0 commit comments

Comments (0)