Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
241 changes: 206 additions & 35 deletions dream-server/bin/dream-host-agent.py

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,13 @@ def download_model(model_id: str, api_key: str = Depends(verify_api_key)):
return result


@router.post("/api/models/download/cancel")
def cancel_download(api_key: str = Depends(verify_api_key)):
    """Request cancellation of any in-progress model download.

    Forwards the request to the host agent's cancel endpoint; the empty
    payload is intentional — cancellation takes no arguments.
    """
    return _call_agent_model("/v1/model/download/cancel", {})


@router.post("/api/models/{model_id}/load")
def load_model(model_id: str, api_key: str = Depends(verify_api_key)):
"""Activate a model — update config and restart llama-server."""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -449,3 +449,44 @@ def test_500_missing_schema(self, env_update_env):

assert handler.response_code == 500
assert ".env.schema.json not found" in handler.parse_response()["error"]


class TestHandleModelDownloadCancel:
    """Behavior of the agent's model-download cancel handler."""

    def test_returns_no_download_when_idle(self, monkeypatch):
        # No worker thread registered -> handler reports there is nothing
        # to cancel and must not set the cancel event.
        handler = _FakeHandler(b"")
        monkeypatch.setattr(_mod, "AGENT_API_KEY", "test-key")
        monkeypatch.setattr(_mod, "_model_download_thread", None)
        _mod._model_download_cancel.clear()

        _mod.AgentHandler._handle_model_download_cancel(handler)

        assert handler.response_code == 200
        assert handler.parse_response()["status"] == "no_download"
        assert not _mod._model_download_cancel.is_set()

    def test_sets_cancel_flag_and_kills_active_proc(self, monkeypatch):
        # An alive download thread -> handler sets the cancel flag and
        # kills the active downloader process.

        class _RunningThread:
            def is_alive(self):
                return True

        class _RecordingProc:
            killed = False

            def kill(self):
                self.killed = True

        handler = _FakeHandler(b"")
        proc = _RecordingProc()
        monkeypatch.setattr(_mod, "AGENT_API_KEY", "test-key")
        monkeypatch.setattr(_mod, "_model_download_thread", _RunningThread())
        monkeypatch.setattr(_mod, "_model_download_proc", proc)
        _mod._model_download_cancel.clear()

        _mod.AgentHandler._handle_model_download_cancel(handler)

        assert handler.response_code == 200
        assert handler.parse_response()["status"] == "cancelling"
        assert _mod._model_download_cancel.is_set()
        assert proc.killed
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,8 @@
# Short local aliases for the module-under-test's internals exercised below.
_check_lemonade_health = _mod._check_lemonade_health
_send_lemonade_warmup = _mod._send_lemonade_warmup
_write_lemonade_config = _mod._write_lemonade_config
_compose_restart_llama_server = _mod._compose_restart_llama_server
_launch_native_llama_server = _mod._launch_native_llama_server


# --- _check_lemonade_health ---
Expand Down Expand Up @@ -136,6 +138,78 @@ def test_file_path(self, tmp_path):
assert (litellm_dir / "lemonade.yaml").exists()


class TestComposeRestartLlamaServer:
    """_compose_restart_llama_server must stop, then recreate, llama-server."""

    def test_amd_uses_stop_then_up(self, monkeypatch, tmp_path):
        recorded = []

        def record_run(cmd, **kwargs):
            # Capture the compose command instead of shelling out.
            recorded.append(cmd)
            return subprocess.CompletedProcess(cmd, 0, stdout="", stderr="")

        monkeypatch.setattr(_mod, "INSTALL_DIR", tmp_path)
        monkeypatch.setattr(
            _mod,
            "resolve_compose_flags",
            lambda: ["--env-file", ".env", "-f", "docker-compose.base.yml"],
        )
        monkeypatch.setattr(subprocess, "run", record_run)

        _compose_restart_llama_server({"GPU_BACKEND": "amd"})

        # Both invocations share the same compose prefix; only the verb differs.
        prefix = [
            "docker", "compose", "--env-file", ".env", "-f",
            "docker-compose.base.yml",
        ]
        assert recorded == [
            prefix + ["stop", "llama-server"],
            prefix + ["up", "-d", "llama-server"],
        ]


class TestLaunchNativeLlamaServer:
    """_launch_native_llama_server builds the llama-server command from .env
    values, spawns the process via subprocess.Popen, and records its PID."""

    def test_reads_env_and_writes_pid(self, monkeypatch, tmp_path):
        env_path = tmp_path / ".env"
        env_path.write_text(
            "GGUF_FILE=test-model.gguf\nCTX_SIZE=8192\nLLAMA_REASONING=on\n",
            encoding="utf-8",
        )
        # parents=True already creates tmp_path/data, so the separate
        # `(tmp_path / "data").mkdir(exist_ok=True)` call was redundant
        # and has been removed.
        (tmp_path / "data" / "models").mkdir(parents=True)
        llama_bin = tmp_path / "bin" / "llama-server"
        llama_bin.parent.mkdir(parents=True)
        llama_bin.write_text("", encoding="utf-8")
        llama_log = tmp_path / "data" / "llama-server.log"
        pid_file = tmp_path / "data" / ".llama-server.pid"

        calls = []

        class _FakeProc:
            # Minimal stand-in for subprocess.Popen's return value.
            pid = 4321

        def fake_popen(cmd, **kwargs):
            calls.append((cmd, kwargs))
            return _FakeProc()

        monkeypatch.setattr(_mod, "INSTALL_DIR", tmp_path)
        monkeypatch.setattr(subprocess, "Popen", fake_popen)

        _launch_native_llama_server(env_path, llama_bin, llama_log, pid_file)

        # The spawned PID is persisted for later shutdown.
        assert pid_file.read_text(encoding="utf-8") == "4321"
        cmd, _kwargs = calls[0]
        assert cmd[0] == str(llama_bin)
        assert "--model" in cmd
        assert str(tmp_path / "data" / "models" / "test-model.gguf") in cmd
        assert "--ctx-size" in cmd
        assert "8192" in cmd
        assert "--reasoning-format" in cmd
        assert "deepseek" in cmd


# --- Rollback integration ---


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,14 +34,14 @@
} else if (data.status === 'complete' || data.status === 'idle') {
setIsDownloading(false)
setProgress(null)
} else if (data.status === 'failed' || data.status === 'error') {
} else if (data.status === 'failed' || data.status === 'error' || data.status === 'cancelled') {
setIsDownloading(false)
setProgress({
error: data.error || data.message || 'Download failed',
error: data.error || data.message || (data.status === 'cancelled' ? 'Download cancelled' : 'Download failed'),
model: data.model
})
}
} catch (err) {

Check warning on line 44 in dream-server/extensions/services/dashboard/src/hooks/useDownloadProgress.js

View workflow job for this annotation

GitHub Actions / frontend

'err' is defined but never used
// Silently fail - API might not be available
}
}, [])
Expand Down Expand Up @@ -76,11 +76,21 @@
return eta
}

const cancelDownload = useCallback(async () => {
try {
await fetch('/api/models/download/cancel', { method: 'POST' })
fetchProgress()
} catch (err) {
console.error('Failed to cancel download:', err)
}
}, [fetchProgress])

return {
isDownloading,
progress,
formatBytes,
formatEta,
refresh: fetchProgress
refresh: fetchProgress,
cancelDownload
}
}
16 changes: 13 additions & 3 deletions dream-server/extensions/services/dashboard/src/pages/Models.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -319,9 +319,19 @@ function DownloadProgressBar({ progress, helpers }) {
</p>
</div>
</div>
<span className="text-lg font-bold text-theme-accent">
{progress.percent?.toFixed(0) || 0}%
</span>
<div className="flex items-center gap-3">
<span className="text-lg font-bold text-theme-accent">
{progress.percent?.toFixed(0) || 0}%
</span>
{helpers.cancelDownload && (
<button
onClick={helpers.cancelDownload}
className="px-3 py-1 text-xs font-medium text-red-400 border border-red-400/30 rounded-lg hover:bg-red-400/10 transition-colors"
>
Cancel
</button>
)}
</div>
</div>

<div className="h-3 bg-theme-border rounded-full overflow-hidden">
Expand Down
Loading