Skip to content

Commit e83e6f2

Browse files
fix: Resolve clippy warnings in source and test files
- Replace `or_insert_with(Vec::new)` with `or_default()` (src/auto_discovery.rs, src/discovery.rs)
- Prefix unused variables with underscore (tests)
- Remove redundant `serde_json` import (tests/vision_tests.rs)
1 parent 4f85436 commit e83e6f2

5 files changed

Lines changed: 7 additions & 8 deletions

File tree

src/auto_discovery.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -305,7 +305,7 @@ impl ModelAutoDiscovery {
305305
let group_key = format!("{}{}", base_name, extension);
306306
shard_groups
307307
.entry(group_key)
308-
.or_insert_with(Vec::new)
308+
.or_default()
309309
.push(file_path.clone());
310310
processed_files.insert(file_path.clone());
311311
}

src/discovery.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -183,7 +183,7 @@ impl ModelDiscovery {
183183
);
184184
shard_groups
185185
.entry(group_key)
186-
.or_insert_with(Vec::new)
186+
.or_default()
187187
.push(file_path.clone());
188188
processed_files.insert(file_path.clone());
189189
} else {

tests/regression/issue_142_amd_gpu_detection.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ mod issue_142_tests {
6161

6262
// Just verify that CPU backend creation doesn't panic and works correctly
6363
let _engine = shimmy::engine::llama::LlamaEngine::new_with_backend(Some("cpu"));
64-
assert!(true); // If we get here, the test passes
64+
// Test passes if we reach here without panicking
6565
}
6666

6767
#[test]
@@ -80,9 +80,9 @@ mod issue_142_tests {
8080
let _engine = shimmy::engine::llama::LlamaEngine::new_with_backend(Some("auto"));
8181

8282
// At least one GPU variable should be set if GPU backends are available
83-
let has_cuda = env::var("GGML_CUDA").is_ok();
84-
let has_vulkan = env::var("GGML_VULKAN").is_ok();
85-
let has_opencl = env::var("GGML_OPENCL").is_ok();
83+
let _has_cuda = env::var("GGML_CUDA").is_ok();
84+
let _has_vulkan = env::var("GGML_VULKAN").is_ok();
85+
let _has_opencl = env::var("GGML_OPENCL").is_ok();
8686

8787
// If any GPU backend is enabled, at least one variable should be set
8888
#[cfg(any(

tests/vision_api_integration.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -569,6 +569,6 @@ mod vision_disabled_tests {
569569
fn test_vision_feature_disabled() {
570570
// When vision feature is disabled, these tests should not run
571571
println!("Vision feature disabled - integration tests skipped");
572-
assert!(true);
572+
// Test passes if we reach here without panicking
573573
}
574574
}

tests/vision_tests.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ mod vision_tests {
1414
use super::*;
1515
use base64::{engine::general_purpose, Engine as _};
1616
use image::{codecs::png::PngEncoder, ColorType, ImageEncoder};
17-
use serde_json;
1817
use serial_test::serial;
1918
use std::collections::HashMap;
2019

0 commit comments

Comments (0)