-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path devenv.nix
More file actions
84 lines (76 loc) · 2.57 KB
/
devenv.nix
File metadata and controls
84 lines (76 loc) · 2.57 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
{ pkgs, lib, config, inputs, ... }:
let
  # Build a process definition for one ollama instance bound to `port`,
  # keeping its model store in `modelsDir` so the two instances do not
  # share state.  The script exports per-instance settings, starts the
  # server in the background, waits (up to 15 s) for it to answer HTTP,
  # pulls the models the test-suite needs, and then blocks on the server
  # with `wait` so the process supervisor keeps tracking it.
  mkOllamaProcess = port: modelsDir: {
    exec = ''
      export OLLAMA_HOST="127.0.0.1:${toString port}"
      export OLLAMA_DEBUG="1"
      export OLLAMA_NUM_PARALLEL="10"
      export OLLAMA_FLASH_ATTENTION="1"
      export OLLAMA_SCHED_SPREAD="0"
      export OLLAMA_MULTIUSER_CACHE="1"
      export OLLAMA_NEW_ENGINE="1"
      export OLLAMA_NEW_ESTIMATES="1"
      export OLLAMA_KEEP_ALIVE="-1"
      export OLLAMA_MODELS="${modelsDir}"
      ollama serve&
      timeout 15 bash -c "until ${lib.getExe pkgs.curl} http://localhost:${toString port} -s; do sleep 0.5; done"
      ollama pull gemma3:1b
      ollama pull nomic-embed-text:v1.5
      ollama list
      wait
    '';
  };
in
{
  packages = with pkgs; [
    ollama
  ];

  enterTest = ''
    wait_for_port 11435
    wait_for_port 11436
    # Wait until every model pulled in processes.ollama1/ollama2 is
    # reported as available by both instances before running the suite.
    for port in 11435 11436; do
      for model in nomic-embed-text:v1.5 gemma3:1b; do
        until ${lib.getExe pkgs.curl} -s "http://127.0.0.1:$port/v1/models/$model" | grep created; do sleep 5; done
      done
    done
    run-tests
  '';

  env = {
    OLLAMA_HOST = "127.0.0.1:11435"; # use the first ollama as the default
    PYTHONUNBUFFERED = "1"; # makes output from subprocesses in tests more reliably visible
  };

  # Run the pytest suite through uv; extra CLI arguments are forwarded.
  scripts.run-tests.exec = ''
    uv run pytest -vv "$@"
  '';

  processes = {
    # The application under test; --reload-extra-file makes gunicorn also
    # restart workers when config.toml changes.
    skvaider.exec = ''
      uv run gunicorn "skvaider:app_factory()" -k uvicorn_worker.UvicornWorker --reload-extra-file config.toml
    '';
    # Two independent ollama backends on distinct ports with separate
    # model directories (see mkOllamaProcess above).
    ollama1 = mkOllamaProcess 11435 ".ollama1/models";
    ollama2 = mkOllamaProcess 11436 ".ollama2/models";
  };

  languages.python = {
    enable = true;
    package = pkgs.python312;
    uv = {
      enable = true;
      sync.enable = true;
    };
  };
}