Mirror of https://github.com/lordmathis/llamactl.git (synced 2025-11-06 00:54:23 +00:00)
Ensure local node is defined in LoadConfig by adding default config if missing
@@ -150,9 +150,7 @@ func LoadConfig(configPath string) (AppConfig, error) {
 			EnableSwagger: false,
 		},
 		LocalNode: "main",
-		Nodes: map[string]NodeConfig{
-			"main": {}, // Local node with empty config
-		},
+		Nodes: map[string]NodeConfig{},
 		Backends: BackendConfig{
 			LlamaCpp: BackendSettings{
 				Command: "llama-server",
@@ -217,6 +215,11 @@ func LoadConfig(configPath string) (AppConfig, error) {
 		return cfg, err
 	}
 
+	// If local node is not defined in nodes, add it with default config
+	if _, ok := cfg.Nodes[cfg.LocalNode]; !ok {
+		cfg.Nodes[cfg.LocalNode] = NodeConfig{}
+	}
+
 	// 3. Override with environment variables
 	loadEnvVars(&cfg)
 
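A minimal standalone sketch of the fallback this commit introduces, using simplified stand-ins for the real config types (the actual llamactl AppConfig and NodeConfig carry more fields, and the helper name ensureLocalNode is hypothetical). It illustrates that a config whose local node is never listed under nodes still ends up with a default entry after loading:

package main

import "fmt"

// Simplified stand-ins for the real config types; the actual llamactl
// structs carry additional fields (backends, server settings, ...).
type NodeConfig struct{}

type AppConfig struct {
	LocalNode string
	Nodes     map[string]NodeConfig
}

// ensureLocalNode mirrors the check added to LoadConfig: if the configured
// local node has no entry in Nodes, insert one with default (empty) config.
func ensureLocalNode(cfg *AppConfig) {
	if cfg.Nodes == nil {
		cfg.Nodes = map[string]NodeConfig{}
	}
	if _, ok := cfg.Nodes[cfg.LocalNode]; !ok {
		cfg.Nodes[cfg.LocalNode] = NodeConfig{}
	}
}

func main() {
	// A config that names a local node but only defines a remote node.
	cfg := AppConfig{
		LocalNode: "main",
		Nodes:     map[string]NodeConfig{"worker1": {}},
	}

	ensureLocalNode(&cfg)
	fmt.Println(cfg.Nodes) // map[main:{} worker1:{}]
}

With the prepopulated "main" entry removed from the defaults, this runtime check keeps the local node defined regardless of what the user's config file lists under nodes.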