Mirror of https://github.com/lordmathis/llamactl.git
Add MaxRunningInstances to InstancesConfig and implement IsRunning method
@@ -55,6 +55,9 @@ type InstancesConfig struct {
 	// Maximum number of instances that can be created
 	MaxInstances int `yaml:"max_instances"`
 
+	// Maximum number of instances that can be running at the same time
+	MaxRunningInstances int `yaml:"max_running_instances,omitempty"`
+
 	// Path to llama-server executable
 	LlamaExecutable string `yaml:"llama_executable"`
 
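The hunk above only adds the configuration field; the IsRunning method named in the commit title lives in the instance code, which this excerpt does not show. As a minimal, self-contained sketch of how such a cap could be enforced (the Instance and instanceManager types, their fields, and the canStart helper are all illustrative assumptions, not code from this commit):

package main

import "fmt"

// Hypothetical stand-ins for llamactl's real types; only the
// MaxRunningInstances setting and its -1 "unlimited" sentinel
// come from the diff above.
type Instance struct{ running bool }

// IsRunning reports whether the instance's process is active.
func (i *Instance) IsRunning() bool { return i.running }

type instanceManager struct {
	instances           []*Instance
	maxRunningInstances int // mirrors InstancesConfig.MaxRunningInstances
}

// canStart reports whether another instance may start under the cap.
func (m *instanceManager) canStart() error {
	if m.maxRunningInstances < 0 { // -1 means unlimited
		return nil
	}
	running := 0
	for _, inst := range m.instances {
		if inst.IsRunning() {
			running++
		}
	}
	if running >= m.maxRunningInstances {
		return fmt.Errorf("maximum running instances (%d) reached", m.maxRunningInstances)
	}
	return nil
}

func main() {
	m := &instanceManager{
		instances:           []*Instance{{running: true}},
		maxRunningInstances: 1,
	}
	fmt.Println(m.canStart()) // maximum running instances (1) reached
}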
@@ -113,6 +116,7 @@ func LoadConfig(configPath string) (AppConfig, error) {
 		LogsDir:             filepath.Join(getDefaultDataDirectory(), "logs"),
 		AutoCreateDirs:      true,
 		MaxInstances:        -1, // -1 means unlimited
+		MaxRunningInstances: -1, // -1 means unlimited
 		LlamaExecutable:     "llama-server",
 		DefaultAutoRestart:  true,
 		DefaultMaxRestarts:  3,
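Since LoadConfig sets these defaults before the file is parsed, omitting max_running_instances from the YAML should leave the -1 (unlimited) value in place. Based on the struct tags in the first hunk, the relevant part of a config file might look like this (the top-level instances: key is an assumption about how InstancesConfig is nested in AppConfig):

instances:
  max_instances: -1          # -1 means unlimited
  max_running_instances: 2   # allow at most two instances to run at once
  llama_executable: llama-server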
@@ -211,6 +215,11 @@ func loadEnvVars(cfg *AppConfig) {
 			cfg.Instances.MaxInstances = m
 		}
 	}
+	if maxRunning := os.Getenv("LLAMACTL_MAX_RUNNING_INSTANCES"); maxRunning != "" {
+		if m, err := strconv.Atoi(maxRunning); err == nil {
+			cfg.Instances.MaxRunningInstances = m
+		}
+	}
 	if llamaExec := os.Getenv("LLAMACTL_LLAMA_EXECUTABLE"); llamaExec != "" {
 		cfg.Instances.LlamaExecutable = llamaExec
 	}
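Note that the strconv.Atoi error is discarded, so a non-numeric LLAMACTL_MAX_RUNNING_INSTANCES silently keeps the configured value rather than failing startup. The override is supplied at launch, for example (the llamactl binary name is an assumption based on the repository name; the env variable names come from the diff):

LLAMACTL_MAX_RUNNING_INSTANCES=2 LLAMACTL_LLAMA_EXECUTABLE=/usr/local/bin/llama-server llamactl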