
Commit 8a4050f

Prepare 1.8.1 release which includes deltalake 0.30.0
This release includes delta_kernel 0.19.0, which brings a substantial performance improvement for stats parsing on larger tables.

Signed-off-by: R. Tyler Croy <[email protected]>
1 parent: bf8f7d6
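For context: the stats parsing that delta_kernel 0.19.0 speeds up runs while a table's transaction log is loaded, so the win shows up at table-open time. A hypothetical sketch, not part of this commit, assuming deltalake still exposes open_table taking a table URI string (the location below is a placeholder, and the exact signature may differ in 0.30.0):

use deltalake::DeltaTableError;

// Requires tokio with the "macros" and "rt-multi-thread" features.
#[tokio::main]
async fn main() -> Result<(), DeltaTableError> {
    // Placeholder location; a larger table with many add-action stats is
    // where the delta_kernel 0.19.0 parsing improvement is most visible.
    let table = deltalake::open_table("s3://example/some-table").await?;
    // Printed with {:?} because version()'s return type has varied across
    // deltalake releases.
    println!("loaded table at version {:?}", table.version());
    Ok(())
}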

File tree

3 files changed: +13 −14 lines changed

Cargo.toml
ci/test.sh
crates/oxbow/src/lib.rs


Cargo.toml

Lines changed: 2 additions & 2 deletions
@@ -7,7 +7,7 @@ members = [
 resolver = "3"
 
 [workspace.package]
-version = "1.8.0"
+version = "1.8.1"
 edition = "2024"
 keywords = ["deltalake", "parquet", "lambda", "delta", "sqs"]
 homepage = "https://github.com/buoyant-data/oxbow"
@@ -20,7 +20,7 @@ anyhow = "=1"
 chrono = "0.4"
 aws_lambda_events = { version = "0.15", default-features = false, features = ["sns", "sqs", "s3"] }
 # The datafusion feature is required to support invariants which may be in error, but is required as of currently released 0.18.2
-deltalake = { version = "0.29.2", features = ["s3", "json", "datafusion"] }
+deltalake = { version = "0.30.0", features = ["s3", "json", "datafusion"] }
 #deltalake = { git = "https://github.com/delta-io/delta-rs", branch = "main", features = ["s3", "json", "datafusion"]}
 #deltalake = { path = "../../delta-io/delta-rs/crates/deltalake", features = ["s3", "json", "datafusion"]}
 futures = { version = "0.3" }

ci/test.sh

Lines changed: 1 addition & 2 deletions
@@ -3,5 +3,4 @@ if [ -f "${HOME}/.cargo/env" ]; then
 . "${HOME}/.cargo/env"
 fi;
 
-
-exec cargo test --verbose
+exec cargo test

crates/oxbow/src/lib.rs

Lines changed: 10 additions & 10 deletions
@@ -85,7 +85,7 @@ pub async fn convert(
         Err(e) => {
             info!("No Delta table at {}: {:?}", location, e);
             let store = logstore_for(
-                location.clone(),
+                &location,
                 StorageConfig::parse_options(storage_options.unwrap_or_default())?,
             )?;
             let files = discover_parquet_files(store.object_store(None).clone()).await?;
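This hunk is the one non-test change: in deltalake 0.30.0, logstore_for borrows the Url instead of consuming it, so the clone() drops out; the remaining hunks below make the same mechanical change across the test suite. A minimal sketch of the new call shape (import paths are assumptions inferred from how this file uses the API, and may need adjusting):

use deltalake::logstore::{logstore_for, LogStoreRef, StorageConfig};
use url::Url;

// 0.29.x took the Url by value, forcing call sites to clone();
// 0.30.0 takes a reference, so the borrow is enough.
fn store_at(location: &Url) -> deltalake::DeltaResult<LogStoreRef> {
    logstore_for(location, StorageConfig::default())
}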
@@ -591,7 +591,7 @@ mod tests {
         let url = Url::from_file_path(dir.path()).expect("Failed to parse local path");
         (
             dir,
-            logstore_for(url, StorageConfig::default()).expect("Failed to get store"),
+            logstore_for(&url, StorageConfig::default()).expect("Failed to get store"),
         )
     }
 }
@@ -600,7 +600,7 @@ mod tests {
     async fn discover_parquet_files_empty_dir() {
         let dir = tempfile::tempdir().expect("Failed to create a temporary directory");
         let url = Url::from_file_path(dir.path()).expect("Failed to parse local path");
-        let store = logstore_for(url, StorageConfig::default()).expect("Failed to get store");
+        let store = logstore_for(&url, StorageConfig::default()).expect("Failed to get store");
 
         let files = discover_parquet_files(store.object_store(None).clone())
             .await
@@ -613,7 +613,7 @@ mod tests {
         let path = std::fs::canonicalize("../../tests/data/hive/deltatbl-non-partitioned")
             .expect("Failed to canonicalize");
         let url = Url::from_file_path(path).expect("Failed to parse local path");
-        let store = logstore_for(url, StorageConfig::default()).expect("Failed to get store");
+        let store = logstore_for(&url, StorageConfig::default()).expect("Failed to get store");
 
         let files = discover_parquet_files(store.object_store(None).clone())
             .await
@@ -837,7 +837,7 @@ mod tests {
 
         let files: Vec<ObjectMeta> = vec![];
         let store = logstore_for(
-            Url::parse("s3://example/non-existent").unwrap(),
+            &Url::parse("s3://example/non-existent").unwrap(),
             StorageConfig::default(),
         )
         .expect("Failed to get store");
@@ -858,7 +858,7 @@ mod tests {
             std::fs::canonicalize("../../tests/data/hive/deltatbl-non-partitioned-with-checkpoint")
                 .expect("Failed to canonicalize");
         let url = Url::from_file_path(test_dir).expect("Failed to parse local path");
-        let store = logstore_for(url, StorageConfig::default()).expect("Failed to get store");
+        let store = logstore_for(&url, StorageConfig::default()).expect("Failed to get store");
 
         let files = discover_parquet_files(store.object_store(None).clone())
             .await
@@ -909,7 +909,7 @@ mod tests {
             "../../tests/data/hive/deltatbl-partitioned",
         )?)
         .expect("Failed to parse");
-        let storage = logstore_for(url, StorageConfig::default()).expect("Failed to get store");
+        let storage = logstore_for(&url, StorageConfig::default()).expect("Failed to get store");
         let meta = storage.object_store(None).head(&location).await.unwrap();
 
         let schema = fetch_parquet_schema(storage.object_store(None).clone(), meta)
@@ -1229,7 +1229,7 @@ mod tests {
         // needs to work with
         let table_url = Url::from_file_path(&table_path).expect("Failed to parse local path");
         let store =
-            logstore_for(table_url, StorageConfig::default()).expect("Failed to get object store");
+            logstore_for(&table_url, StorageConfig::default()).expect("Failed to get object store");
         let files = discover_parquet_files(store.object_store(None).clone())
             .await
             .expect("Failed to discover parquet files");
@@ -1326,8 +1326,8 @@ mod tests {
         let formatted = format!("{}", coerced);
 
         assert!(
-            formatted.contains("Timestamp(Microsecond"),
-            "Expected to find a Timestamp(Microsecond) in the coerced schema, got: {formatted}"
+            formatted.contains("Timestamp(µs)"),
+            "Expected to find a Timestamp(µs) in the coerced schema, got: {formatted}"
         );
         assert!(
             !formatted.contains("Timestamp(Nanosecond"),
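The assertion text changes because the arrow upgrade that rides along with deltalake 0.30.0 renders DataType::Timestamp more compactly via Display: "Timestamp(Microsecond, ...)" becomes "Timestamp(µs)". A small standalone illustration, assuming arrow-schema as a direct dependency (not code from this repo):

use arrow_schema::{DataType, TimeUnit};

fn main() {
    let coerced = DataType::Timestamp(TimeUnit::Microsecond, None);
    // Older arrow printed "Timestamp(Microsecond, None)"; newer releases
    // print the compact "Timestamp(µs)" that the updated test expects.
    let formatted = format!("{coerced}");
    assert!(formatted.contains("Timestamp"));
    println!("{formatted}");
}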
