Mirror of https://github.com/lordmathis/llamactl.git (synced 2025-11-06)
Fix tests
@@ -63,8 +63,12 @@ func TestCreateInstance_ValidationAndLimits(t *testing.T) {
	// Test max instances limit
	backendConfig := config.BackendConfig{
		LlamaExecutable: "llama-server",
		MLXLMExecutable: "mlx_lm.server",
		LlamaCpp: config.BackendSettings{
			Command: "llama-server",
		},
		MLX: config.BackendSettings{
			Command: "mlx_lm.server",
		},
	}
	cfg := config.InstancesConfig{
		PortRange: [2]int{8000, 9000},
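The hunk is truncated before any assertions, so the actual limit check in TestCreateInstance_ValidationAndLimits is not visible here. As a rough sketch of the pattern such a test exercises, the standalone Go example below uses stand-in types and names (InstancesConfig.MaxInstances, Manager, and CreateInstance are hypothetical, not necessarily llamactl's real API): instances can be created up to a configured maximum, and the next attempt is rejected.

package main

import (
	"errors"
	"fmt"
)

// Hypothetical stand-ins for illustration; llamactl's real types and
// method names may differ.
type InstancesConfig struct {
	PortRange    [2]int
	MaxInstances int
}

type Manager struct {
	cfg       InstancesConfig
	instances map[string]struct{}
}

func NewManager(cfg InstancesConfig) *Manager {
	return &Manager{cfg: cfg, instances: make(map[string]struct{})}
}

// CreateInstance enforces the max-instances limit before registering a new name.
func (m *Manager) CreateInstance(name string) error {
	if m.cfg.MaxInstances > 0 && len(m.instances) >= m.cfg.MaxInstances {
		return errors.New("max instances limit reached")
	}
	m.instances[name] = struct{}{}
	return nil
}

func main() {
	cfg := InstancesConfig{PortRange: [2]int{8000, 9000}, MaxInstances: 1}
	m := NewManager(cfg)

	fmt.Println(m.CreateInstance("first"))  // <nil>: under the limit
	fmt.Println(m.CreateInstance("second")) // error: max instances limit reached
}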