Support llama.cpp router mode for OpenAI endpoints

2025-12-21 23:32:33 +01:00
parent 41d904475c
commit ee122d669c
5 changed files with 174 additions and 183 deletions


@@ -7,6 +7,7 @@ import (
 	"net/http"
 	"time"
+	"llamactl/pkg/backends"
 	"llamactl/pkg/config"
 )
@@ -117,6 +118,14 @@ func (i *Instance) WaitForHealthy(timeout int) error {
 	return i.process.waitForHealthy(timeout)
 }
 
+func (i *Instance) GetBackendType() backends.BackendType {
+	opts := i.GetOptions()
+	if opts == nil {
+		return backends.BackendTypeUnknown
+	}
+	return opts.BackendOptions.BackendType
+}
+
 // GetOptions returns the current options
 func (i *Instance) GetOptions() *Options {
 	if i.options == nil {
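
The new GetBackendType accessor lets the HTTP layer branch on an instance's backend when serving OpenAI-compatible routes. The sketch below illustrates that idea only; the type names, constants, and addresses are stand-ins invented for this example, not llamactl's actual API, and the real routing logic lives in the other changed files of this commit.

package main

import (
	"fmt"
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
)

// BackendType and the constants below are illustrative stand-ins that mirror
// the backends.BackendType used in the diff above; they are not the project's
// real identifiers.
type BackendType string

const (
	BackendTypeUnknown  BackendType = "unknown"
	BackendTypeLlamaCpp BackendType = "llama_cpp"
)

// Instance is a minimal stand-in exposing only what this sketch needs.
type Instance struct {
	Name        string
	backendType BackendType
	upstream    *url.URL
}

func (i *Instance) GetBackendType() BackendType { return i.backendType }

// openAIProxyHandler branches on the backend type: llama.cpp instances are
// proxied straight through, since llama-server already exposes an
// OpenAI-compatible API, while other backends are rejected here.
func openAIProxyHandler(inst *Instance) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch inst.GetBackendType() {
		case BackendTypeLlamaCpp:
			// Forward the request unchanged to the instance's upstream server.
			httputil.NewSingleHostReverseProxy(inst.upstream).ServeHTTP(w, r)
		default:
			msg := fmt.Sprintf("backend %q does not serve OpenAI endpoints", inst.GetBackendType())
			http.Error(w, msg, http.StatusNotImplemented)
		}
	})
}

func main() {
	// Hypothetical llama-server address; adjust to wherever the instance listens.
	upstream, err := url.Parse("http://127.0.0.1:8080")
	if err != nil {
		log.Fatal(err)
	}
	inst := &Instance{Name: "example", backendType: BackendTypeLlamaCpp, upstream: upstream}
	http.Handle("/v1/", openAIProxyHandler(inst))
	log.Fatal(http.ListenAndServe(":8081", nil))
}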