Skip to content

Commit 829f790

Browse files
Merge pull request #900 from yasinBursali/fix/model-library-sha256-verification
fix(models): populate SHA256 catalog + verify every download part
2 parents bb7e37f + e29f787 commit 829f790

File tree

2 files changed: +67 −28 lines

dream-server/bin/dream-host-agent.py

Lines changed: 45 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1072,7 +1072,6 @@ def _handle_model_download(self):
10721072

10731073
gguf_file = body.get("gguf_file", "")
10741074
gguf_url = body.get("gguf_url", "")
1075-
gguf_sha256 = body.get("gguf_sha256", "")
10761075
gguf_parts = body.get("gguf_parts", [])
10771076

10781077
if not gguf_file or (not gguf_url and not gguf_parts):
@@ -1088,9 +1087,12 @@ def _handle_model_download(self):
10881087
else:
10891088
download_plan = [(gguf_file, gguf_url)]
10901089

1091-
# Validate against library (prevent arbitrary URL downloads)
1090+
# Validate against library (prevent arbitrary URL downloads).
1091+
# Also harvest expected SHA256s keyed by filename so verification can
1092+
# cover every part of split-file downloads, not just single-file models.
10921093
library_path = INSTALL_DIR / "config" / "model-library.json"
10931094
allowed = False
1095+
expected_sha_by_file: dict = {}
10941096
if library_path.exists():
10951097
try:
10961098
lib = json.loads(library_path.read_text(encoding="utf-8"))
@@ -1099,12 +1101,21 @@ def _handle_model_download(self):
10991101
continue
11001102
if gguf_parts:
11011103
# Verify every (file, url) in the request matches the library
1102-
lib_parts = {(p["file"], p["url"]) for p in m.get("gguf_parts", [])}
1104+
lib_parts_meta = {
1105+
(p["file"], p["url"]): p.get("sha256", "")
1106+
for p in m.get("gguf_parts", [])
1107+
if p.get("file") and p.get("url")
1108+
}
11031109
req_parts = set(download_plan)
1104-
if req_parts and req_parts <= lib_parts:
1110+
if req_parts and req_parts <= set(lib_parts_meta.keys()):
11051111
allowed = True
1112+
expected_sha_by_file = {
1113+
file: lib_parts_meta[(file, url)]
1114+
for file, url in download_plan
1115+
}
11061116
elif m.get("gguf_url") == gguf_url:
11071117
allowed = True
1118+
expected_sha_by_file = {gguf_file: m.get("gguf_sha256", "")}
11081119
break
11091120
except (json.JSONDecodeError, OSError):
11101121
pass
@@ -1197,20 +1208,43 @@ def _poll_progress():
11971208
_write_model_status(status_path, "failed", part_label, 0, part_total, "Download failed after 3 attempts")
11981209
return
11991210

1200-
# Verify SHA256 if provided (single-file only)
1201-
if gguf_sha256 and len(download_plan) == 1:
1202-
final_target = models_dir / download_plan[0][0]
1211+
# Verify SHA256 for every downloaded part. Catalog is the
1212+
# source of truth: split-file models carry per-part sha256
1213+
# in expected_sha_by_file, single-file models carry one
1214+
# entry. Empty checksum -> warn (do not silently skip), so
1215+
# missing catalog entries surface during operator review.
1216+
import hashlib
1217+
for part_idx, (part_file_name, _) in enumerate(download_plan, 1):
1218+
expected = expected_sha_by_file.get(part_file_name, "")
1219+
final_target = models_dir / part_file_name
1220+
if not expected:
1221+
logger.warning(
1222+
"SHA256 verification skipped for %s: no checksum in model-library.json",
1223+
part_file_name,
1224+
)
1225+
continue
12031226
final_size = final_target.stat().st_size
1204-
_write_model_status(status_path, "verifying", gguf_file, final_size, final_size)
1205-
import hashlib
1227+
verify_label = (
1228+
part_file_name
1229+
if len(download_plan) == 1
1230+
else f"{part_file_name} (part {part_idx}/{len(download_plan)})"
1231+
)
1232+
_write_model_status(status_path, "verifying", verify_label, final_size, final_size)
12061233
sha = hashlib.sha256()
12071234
with open(final_target, "rb") as f:
12081235
for chunk in iter(lambda: f.read(1048576), b""):
12091236
sha.update(chunk)
12101237
actual = sha.hexdigest()
1211-
if actual != gguf_sha256:
1238+
if actual != expected:
12121239
final_target.unlink(missing_ok=True)
1213-
_write_model_status(status_path, "failed", gguf_file, 0, 0, f"SHA256 mismatch: expected {gguf_sha256[:12]}..., got {actual[:12]}...")
1240+
_write_model_status(
1241+
status_path,
1242+
"failed",
1243+
part_file_name,
1244+
0,
1245+
0,
1246+
f"SHA256 mismatch: expected {expected[:12]}..., got {actual[:12]}...",
1247+
)
12141248
return
12151249

12161250
_write_model_status(status_path, "complete", gguf_file, 0, 0)

dream-server/config/model-library.json

Lines changed: 22 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@
77
"family": "qwen",
88
"gguf_file": "Qwen3.5-2B-Q4_K_M.gguf",
99
"gguf_url": "https://huggingface.co/unsloth/Qwen3.5-2B-GGUF/resolve/main/Qwen3.5-2B-Q4_K_M.gguf",
10-
"gguf_sha256": "",
10+
"gguf_sha256": "aaf42c8b7c3cab2bf3d69c355048d4a0ee9973d48f16c731c0520ee914699223",
1111
"size_mb": 1500,
1212
"vram_required_gb": 3,
1313
"context_length": 8192,
@@ -24,7 +24,7 @@
2424
"family": "phi",
2525
"gguf_file": "Phi-4-mini-instruct-Q4_K_M.gguf",
2626
"gguf_url": "https://huggingface.co/unsloth/Phi-4-mini-instruct-GGUF/resolve/main/Phi-4-mini-instruct-Q4_K_M.gguf",
27-
"gguf_sha256": "",
27+
"gguf_sha256": "88c00229914083cd112853aab84ed51b87bdf6b9ce42f532d8c85c7c63b1730a",
2828
"size_mb": 2490,
2929
"vram_required_gb": 4,
3030
"context_length": 128000,
@@ -58,7 +58,7 @@
5858
"family": "gemma4",
5959
"gguf_file": "gemma-4-E2B-it-Q4_K_M.gguf",
6060
"gguf_url": "https://huggingface.co/unsloth/gemma-4-E2B-it-GGUF/resolve/main/gemma-4-E2B-it-Q4_K_M.gguf",
61-
"gguf_sha256": "",
61+
"gguf_sha256": "ac0069ebccd39925d836f24a88c0f0c858d20578c29b21ab7cedce66ee576845",
6262
"size_mb": 2810,
6363
"vram_required_gb": 5,
6464
"context_length": 16384,
@@ -75,7 +75,7 @@
7575
"family": "deepseek",
7676
"gguf_file": "DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf",
7777
"gguf_url": "https://huggingface.co/unsloth/DeepSeek-R1-Distill-Qwen-7B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-7B-Q4_K_M.gguf",
78-
"gguf_sha256": "",
78+
"gguf_sha256": "78272d8d32084548bd450394a560eb2d70de8232ab96a725769b1f9171235c1c",
7979
"size_mb": 4680,
8080
"vram_required_gb": 7,
8181
"context_length": 32768,
@@ -92,7 +92,7 @@
9292
"family": "gemma4",
9393
"gguf_file": "gemma-4-E4B-it-Q4_K_M.gguf",
9494
"gguf_url": "https://huggingface.co/unsloth/gemma-4-E4B-it-GGUF/resolve/main/gemma-4-E4B-it-Q4_K_M.gguf",
95-
"gguf_sha256": "",
95+
"gguf_sha256": "dff0ffba4c90b4082d70214d53ce9504a28d4d8d998276dcb3b8881a656c742a",
9696
"size_mb": 5340,
9797
"vram_required_gb": 8,
9898
"context_length": 32768,
@@ -126,7 +126,7 @@
126126
"family": "phi",
127127
"gguf_file": "phi-4-Q4_K_M.gguf",
128128
"gguf_url": "https://huggingface.co/bartowski/phi-4-GGUF/resolve/main/phi-4-Q4_K_M.gguf",
129-
"gguf_sha256": "",
129+
"gguf_sha256": "009aba717c09d4a35890c7d35eb59d54e1dba884c7c526e7197d9c13ab5911d9",
130130
"size_mb": 9050,
131131
"vram_required_gb": 11,
132132
"context_length": 16384,
@@ -143,7 +143,7 @@
143143
"family": "deepseek",
144144
"gguf_file": "DeepSeek-R1-Distill-Qwen-14B-Q4_K_M.gguf",
145145
"gguf_url": "https://huggingface.co/unsloth/DeepSeek-R1-Distill-Qwen-14B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-14B-Q4_K_M.gguf",
146-
"gguf_sha256": "",
146+
"gguf_sha256": "67a7933cf2ad596a393c8e13b30bc4da2d50b283e250b78554aed18817eca31c",
147147
"size_mb": 8990,
148148
"vram_required_gb": 12,
149149
"context_length": 32768,
@@ -177,7 +177,7 @@
177177
"family": "gemma4",
178178
"gguf_file": "gemma-4-26B-A4B-it-Q4_K_M.gguf",
179179
"gguf_url": "https://huggingface.co/ggml-org/gemma-4-26B-A4B-it-GGUF/resolve/main/gemma-4-26B-A4B-it-Q4_K_M.gguf",
180-
"gguf_sha256": "",
180+
"gguf_sha256": "23c6997912cb7fa36147fe05877de73ddbb2a80ff69b18ff171b354dccf2b5b5",
181181
"size_mb": 18000,
182182
"vram_required_gb": 22,
183183
"context_length": 16384,
@@ -211,7 +211,7 @@
211211
"family": "gemma4",
212212
"gguf_file": "gemma-4-31B-it-Q4_K_M.gguf",
213213
"gguf_url": "https://huggingface.co/ggml-org/gemma-4-31B-it-GGUF/resolve/main/gemma-4-31B-it-Q4_K_M.gguf",
214-
"gguf_sha256": "",
214+
"gguf_sha256": "a20deaf2f8fc27c501f32fadfd538f8f31a76f10f47d5d3eb895f3d1112d752c",
215215
"size_mb": 19800,
216216
"vram_required_gb": 24,
217217
"context_length": 131072,
@@ -228,7 +228,7 @@
228228
"family": "deepseek",
229229
"gguf_file": "DeepSeek-R1-Distill-Qwen-32B-Q4_K_M.gguf",
230230
"gguf_url": "https://huggingface.co/unsloth/DeepSeek-R1-Distill-Qwen-32B-GGUF/resolve/main/DeepSeek-R1-Distill-Qwen-32B-Q4_K_M.gguf",
231-
"gguf_sha256": "",
231+
"gguf_sha256": "ca171ca03554ee20cf67ad6b540610ae7eabb95af00c0abd36bb73542e140fb5",
232232
"size_mb": 19900,
233233
"vram_required_gb": 24,
234234
"context_length": 32768,
@@ -245,7 +245,7 @@
245245
"family": "qwen",
246246
"gguf_file": "Qwen3.5-35B-A3B-Q4_K_M.gguf",
247247
"gguf_url": "https://huggingface.co/unsloth/Qwen3.5-35B-A3B-GGUF/resolve/main/Qwen3.5-35B-A3B-Q4_K_M.gguf",
248-
"gguf_sha256": "",
248+
"gguf_sha256": "3b46d1066bc91cc2d613e3bc22ce691dd77e6f0d33c9060690d24ce6de494375",
249249
"size_mb": 22000,
250250
"vram_required_gb": 24,
251251
"context_length": 131072,
@@ -262,7 +262,7 @@
262262
"family": "deepseek",
263263
"gguf_file": "DeepSeek-R1-Distill-Llama-70B-Q4_K_M.gguf",
264264
"gguf_url": "https://huggingface.co/unsloth/DeepSeek-R1-Distill-Llama-70B-GGUF/resolve/main/DeepSeek-R1-Distill-Llama-70B-Q4_K_M.gguf",
265-
"gguf_sha256": "",
265+
"gguf_sha256": "952ff479c48ac3ece81fb6d9a5a03bfeac215e6caac780fbe91ff8cb0e05bcf3",
266266
"size_mb": 42500,
267267
"vram_required_gb": 48,
268268
"context_length": 32768,
@@ -300,11 +300,13 @@
300300
"gguf_parts": [
301301
{
302302
"file": "Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00001-of-00002.gguf",
303-
"url": "https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF/resolve/main/Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00001-of-00002.gguf"
303+
"url": "https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF/resolve/main/Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00001-of-00002.gguf",
304+
"sha256": "fbe956902467171ed7c0c326e5d868771a84d46468d407abecd0f289297313f9"
304305
},
305306
{
306307
"file": "Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00002-of-00002.gguf",
307-
"url": "https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF/resolve/main/Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00002-of-00002.gguf"
308+
"url": "https://huggingface.co/unsloth/Llama-4-Scout-17B-16E-Instruct-GGUF/resolve/main/Q4_K_M/Llama-4-Scout-17B-16E-Instruct-Q4_K_M-00002-of-00002.gguf",
309+
"sha256": "e7330ae14527f08e8a44a6a573b45084052b8822c4b8a5179c5cad9cd6f6f795"
308310
}
309311
],
310312
"size_mb": 65300,
@@ -327,15 +329,18 @@
327329
"gguf_parts": [
328330
{
329331
"file": "Qwen3.5-122B-A10B-Q4_K_M-00001-of-00003.gguf",
330-
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00001-of-00003.gguf"
332+
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00001-of-00003.gguf",
333+
"sha256": "467c9bd92ea518539cf75bf5a5fbfbd35e9a0b40d766ccaa67bf120e12041df3"
331334
},
332335
{
333336
"file": "Qwen3.5-122B-A10B-Q4_K_M-00002-of-00003.gguf",
334-
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00002-of-00003.gguf"
337+
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00002-of-00003.gguf",
338+
"sha256": "90db14846413aebdac365b57206441437cac5f7e5037d94b325f0167f902e6e7"
335339
},
336340
{
337341
"file": "Qwen3.5-122B-A10B-Q4_K_M-00003-of-00003.gguf",
338-
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00003-of-00003.gguf"
342+
"url": "https://huggingface.co/unsloth/Qwen3.5-122B-A10B-GGUF/resolve/main/Q4_K_M/Qwen3.5-122B-A10B-Q4_K_M-00003-of-00003.gguf",
343+
"sha256": "e3c24b8ebec070bb4f69ea0aca25a16531da7440cd515529953e046882901f97"
339344
}
340345
],
341346
"size_mb": 76500,

Comments (0)