1 file changed: +23 −1 lines changed

@@ -31,15 +31,37 @@ func New(config *Config) *ProxyManager {
 func (pm *ProxyManager) HandleFunc(w http.ResponseWriter, r *http.Request) {
 
 	// https://github.com/ggerganov/llama.cpp/blob/master/examples/server/README.md#api-endpoints
-
 	if r.URL.Path == "/v1/chat/completions" {
 		// extracts the `model` from json body
 		pm.proxyChatRequest(w, r)
+	} else if r.URL.Path == "/v1/models" {
+		pm.listModels(w, r)
 	} else {
 		pm.proxyRequest(w, r)
 	}
 }
 
+func (pm *ProxyManager) listModels(w http.ResponseWriter, r *http.Request) {
+	data := []interface{}{}
+	for id := range pm.config.Models {
+		data = append(data, map[string]interface{}{
+			"id":       id,
+			"object":   "model",
+			"created":  time.Now().Unix(),
+			"owned_by": "llama-swap",
+		})
+	}
+
+	// Set the Content-Type header to application/json
+	w.Header().Set("Content-Type", "application/json")
+
+	// Encode the data as JSON and write it to the response writer
+	if err := json.NewEncoder(w).Encode(map[string]interface{}{"data": data}); err != nil {
+		http.Error(w, "Error encoding JSON", http.StatusInternalServerError)
+		return
+	}
+}
+
 func (pm *ProxyManager) swapModel(requestedModel string) error {
 	pm.Lock()
 	defer pm.Unlock()
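
For context, a minimal client sketch (not part of the diff above) showing how the new /v1/models endpoint might be exercised once the proxy is running. The address http://localhost:8080 and the modelList type are assumptions for illustration, not part of the PR.

// Hypothetical client: fetch the model list from the proxy and print the IDs.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// modelList mirrors the response shape written by listModels above:
// {"data": [{"id": ..., "object": "model", "created": ..., "owned_by": ...}]}
type modelList struct {
	Data []struct {
		ID      string `json:"id"`
		Object  string `json:"object"`
		Created int64  `json:"created"`
		OwnedBy string `json:"owned_by"`
	} `json:"data"`
}

func main() {
	// The proxy address is an assumption; adjust to your deployment.
	resp, err := http.Get("http://localhost:8080/v1/models")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var list modelList
	if err := json.NewDecoder(resp.Body).Decode(&list); err != nil {
		log.Fatal(err)
	}
	for _, m := range list.Data {
		fmt.Println(m.ID)
	}
}

Since the handler emits the same {"data": [...]} shape as the OpenAI models list, existing OpenAI-compatible clients should be able to enumerate the configured models without modification.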