18 Commits

Author SHA1 Message Date
38790aa507 Support llama.cpp router mode for openai endpoints 2025-12-21 23:32:33 +01:00
faf026aa54 Remove model registry 2025-12-21 20:48:22 +01:00
fd9e651e09 Implement model management for llama.cpp instances 2025-12-18 19:14:20 +01:00
f3c02b4939 Merge pull request #108 from lordmathis/refactor/config
refactor: Split large config file
2025-12-13 14:27:25 +01:00
0a85409deb Split large config file 2025-12-13 13:50:59 +01:00
22fd295250 Merge pull request #107 from lordmathis/feat/logrotate
feat: Add log rotation for instance logs
2025-12-13 13:30:20 +01:00
c0cecdd377 Clean up logger 2025-12-13 13:18:30 +01:00
4d57b37a5d Remove verbose _mb suffix 2025-12-13 13:06:22 +01:00
c13b71d07f Document new log rotation config options 2025-12-13 13:02:22 +01:00
406a711682 Move LogRotationConfig to logger package 2025-12-13 12:48:50 +01:00
0b3d654945 Simplify logging config 2025-12-13 12:48:50 +01:00
e2a49402d6 Implement instance log rotation 2025-12-13 12:48:50 +01:00
48836c9c12 Merge pull request #105 from lordmathis/dependabot/npm_and_yarn/webui/npm-production-3fe24f4500
chore: bump lucide-react from 0.555.0 to 0.556.0 in /webui in the npm-production group
2025-12-12 10:43:12 +01:00
4200b8eed9 Merge pull request #104 from lordmathis/dependabot/go_modules/go-dependencies-f180a085e8
chore: bump golang.org/x/crypto from 0.45.0 to 0.46.0 in the go-dependencies group
2025-12-11 18:51:40 +01:00
dependabot[bot]
9a7ae87df8 chore: bump lucide-react in /webui in the npm-production group
Bumps the npm-production group in /webui with 1 update: [lucide-react](https://github.com/lucide-icons/lucide/tree/HEAD/packages/lucide-react).


Updates `lucide-react` from 0.555.0 to 0.556.0
- [Release notes](https://github.com/lucide-icons/lucide/releases)
- [Commits](https://github.com/lucide-icons/lucide/commits/0.556.0/packages/lucide-react)

---
updated-dependencies:
- dependency-name: lucide-react
  dependency-version: 0.556.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: npm-production
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-11 17:17:18 +00:00
e54c495528 Merge pull request #106 from lordmathis/dependabot/npm_and_yarn/webui/npm-development-808d3127cd
chore: bump the npm-development group in /webui with 2 updates
2025-12-11 18:12:15 +01:00
dependabot[bot]
83006968ca chore: bump the npm-development group in /webui with 2 updates
Bumps the npm-development group in /webui with 2 updates: [jsdom](https://github.com/jsdom/jsdom) and [typescript-eslint](https://github.com/typescript-eslint/typescript-eslint/tree/HEAD/packages/typescript-eslint).


Updates `jsdom` from 27.2.0 to 27.3.0
- [Release notes](https://github.com/jsdom/jsdom/releases)
- [Changelog](https://github.com/jsdom/jsdom/blob/main/Changelog.md)
- [Commits](https://github.com/jsdom/jsdom/compare/27.2.0...27.3.0)

Updates `typescript-eslint` from 8.48.0 to 8.49.0
- [Release notes](https://github.com/typescript-eslint/typescript-eslint/releases)
- [Changelog](https://github.com/typescript-eslint/typescript-eslint/blob/main/packages/typescript-eslint/CHANGELOG.md)
- [Commits](https://github.com/typescript-eslint/typescript-eslint/commits/v8.49.0/packages/typescript-eslint)

---
updated-dependencies:
- dependency-name: jsdom
  dependency-version: 27.3.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: npm-development
- dependency-name: typescript-eslint
  dependency-version: 8.49.0
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: npm-development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 21:31:36 +00:00
dependabot[bot]
c8d9c6907c chore: bump golang.org/x/crypto in the go-dependencies group
Bumps the go-dependencies group with 1 update: [golang.org/x/crypto](https://github.com/golang/crypto).


Updates `golang.org/x/crypto` from 0.45.0 to 0.46.0
- [Commits](https://github.com/golang/crypto/compare/v0.45.0...v0.46.0)

---
updated-dependencies:
- dependency-name: golang.org/x/crypto
  dependency-version: 0.46.0
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: go-dependencies
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-12-08 21:30:38 +00:00
27 changed files with 1744 additions and 793 deletions


@@ -195,6 +195,9 @@ instances:
default_on_demand_start: true # Default on-demand start setting
on_demand_start_timeout: 120 # Default on-demand start timeout in seconds
timeout_check_interval: 5 # Idle instance timeout check in minutes
+ log_rotation_enabled: true # Enable log rotation (default: true)
+ log_rotation_max_size: 100 # Max log file size in MB before rotation (default: 100)
+ log_rotation_compress: false # Compress rotated log files (default: false)
database:
path: ~/.local/share/llamactl/llamactl.db # Database file path (platform dependent)


@@ -230,6 +230,9 @@ instances:
default_on_demand_start: true # Default on-demand start setting
on_demand_start_timeout: 120 # Default on-demand start timeout in seconds
timeout_check_interval: 5 # Default instance timeout check interval in minutes
+ log_rotation_enabled: true # Enable log rotation (default: true)
+ log_rotation_max_size: 100 # Max log file size in MB before rotation (default: 100)
+ log_rotation_compress: false # Compress rotated log files (default: false)
```
**Environment Variables:**
@@ -246,6 +249,9 @@ instances:
- `LLAMACTL_DEFAULT_ON_DEMAND_START` - Default on-demand start setting (true/false)
- `LLAMACTL_ON_DEMAND_START_TIMEOUT` - Default on-demand start timeout in seconds
- `LLAMACTL_TIMEOUT_CHECK_INTERVAL` - Default instance timeout check interval in minutes
+ - `LLAMACTL_LOG_ROTATION_ENABLED` - Enable log rotation (true/false)
+ - `LLAMACTL_LOG_ROTATION_MAX_SIZE` - Max log file size in MB
+ - `LLAMACTL_LOG_ROTATION_COMPRESS` - Compress rotated logs (true/false)
### Database Configuration

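For context, a minimal sketch (not part of this changeset) of how the new variables take effect: environment variables are applied after defaults and the config file, so they always win. It mirrors the pattern used in the config tests, where a nonexistent path skips the config file.

package main

import (
	"fmt"
	"os"

	"llamactl/pkg/config"
)

func main() {
	os.Setenv("LLAMACTL_LOG_ROTATION_ENABLED", "true")
	os.Setenv("LLAMACTL_LOG_ROTATION_MAX_SIZE", "250") // MB
	os.Setenv("LLAMACTL_LOG_ROTATION_COMPRESS", "true")

	// A nonexistent path skips the config file, as in the existing tests.
	cfg, err := config.LoadConfig("nonexistent-file.yaml")
	if err != nil {
		panic(err)
	}
	fmt.Println(cfg.Instances.LogRotationMaxSize) // 250, overriding the default of 100
}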
go.mod

@@ -3,13 +3,14 @@ module llamactl
go 1.24.5
require (
+ github.com/DeRuina/timberjack v1.3.9
github.com/go-chi/chi/v5 v5.2.2
github.com/go-chi/cors v1.2.2
github.com/golang-migrate/migrate/v4 v4.19.1
github.com/mattn/go-sqlite3 v1.14.24
github.com/swaggo/http-swagger v1.3.4
github.com/swaggo/swag v1.16.5
- golang.org/x/crypto v0.45.0
+ golang.org/x/crypto v0.46.0
gopkg.in/yaml.v3 v3.0.1
)
@@ -20,11 +21,12 @@ require (
github.com/go-openapi/spec v0.21.0 // indirect
github.com/go-openapi/swag v0.23.1 // indirect
github.com/josharian/intern v1.0.0 // indirect
+ github.com/klauspost/compress v1.17.11 // indirect
github.com/mailru/easyjson v0.9.0 // indirect
github.com/swaggo/files v1.0.1 // indirect
golang.org/x/mod v0.29.0 // indirect
golang.org/x/net v0.47.0 // indirect
golang.org/x/sync v0.18.0 // indirect
- golang.org/x/sys v0.38.0 // indirect
+ golang.org/x/sys v0.39.0 // indirect
golang.org/x/tools v0.38.0 // indirect
)

go.sum

@@ -1,7 +1,11 @@
+ github.com/DeRuina/timberjack v1.3.9 h1:6UXZ1I7ExPGTX/1UNYawR58LlOJUHKBPiYC7WQ91eBo=
+ github.com/DeRuina/timberjack v1.3.9/go.mod h1:RLoeQrwrCGIEF8gO5nV5b/gMD0QIy7bzQhBUgpp1EqE=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/go-chi/chi/v5 v5.2.2 h1:CMwsvRVTbXVytCk1Wd72Zy1LAsAh9GxMmSNWLHCG618=
github.com/go-chi/chi/v5 v5.2.2/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
@@ -20,6 +24,8 @@ github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+ github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
+ github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
@@ -45,8 +51,8 @@ github.com/swaggo/swag v1.16.5/go.mod h1:ngP2etMK5a0P3QBizic5MEwpRmluJZPHjXcMoj4
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
- golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q=
- golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
+ golang.org/x/crypto v0.46.0 h1:cKRW/pmt1pKAfetfu+RCEvjvZkA9RimPbh7bhFjGVBU=
+ golang.org/x/crypto v0.46.0/go.mod h1:Evb/oLKmMraqjZ2iQTwDwvCtJkczlDuTmdJXoZVzqU0=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA=
golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w=
@@ -66,8 +72,8 @@ golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
- golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc=
- golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
+ golang.org/x/sys v0.39.0 h1:CvCKL8MeisomCi6qNZ+wbb0DN9E5AATixKsvNtMoMFk=
+ golang.org/x/sys v0.39.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=


@@ -14,6 +14,7 @@ const (
BackendTypeLlamaCpp BackendType = "llama_cpp"
BackendTypeMlxLm BackendType = "mlx_lm"
BackendTypeVllm BackendType = "vllm"
+ BackendTypeUnknown BackendType = "unknown"
)
type backend interface {


@@ -6,233 +6,18 @@ import (
"log"
"os"
"path/filepath"
"runtime"
"strconv"
"strings"
"time"
"gopkg.in/yaml.v3"
)
// BackendSettings contains structured backend configuration
type BackendSettings struct {
Command string `yaml:"command" json:"command"`
Args []string `yaml:"args" json:"args"`
Environment map[string]string `yaml:"environment,omitempty" json:"environment,omitempty"`
Docker *DockerSettings `yaml:"docker,omitempty" json:"docker,omitempty"`
ResponseHeaders map[string]string `yaml:"response_headers,omitempty" json:"response_headers,omitempty"`
}
// DockerSettings contains Docker-specific configuration
type DockerSettings struct {
Enabled bool `yaml:"enabled" json:"enabled"`
Image string `yaml:"image" json:"image"`
Args []string `yaml:"args" json:"args"`
Environment map[string]string `yaml:"environment,omitempty" json:"environment,omitempty"`
}
// BackendConfig contains backend executable configurations
type BackendConfig struct {
LlamaCpp BackendSettings `yaml:"llama-cpp" json:"llama-cpp"`
VLLM BackendSettings `yaml:"vllm" json:"vllm"`
MLX BackendSettings `yaml:"mlx" json:"mlx"`
}
// AppConfig represents the configuration for llamactl
type AppConfig struct {
Server ServerConfig `yaml:"server" json:"server"`
Backends BackendConfig `yaml:"backends" json:"backends"`
Instances InstancesConfig `yaml:"instances" json:"instances"`
Database DatabaseConfig `yaml:"database" json:"database"`
Auth AuthConfig `yaml:"auth" json:"auth"`
LocalNode string `yaml:"local_node,omitempty" json:"local_node,omitempty"`
Nodes map[string]NodeConfig `yaml:"nodes,omitempty" json:"nodes,omitempty"`
// Directory where all llamactl data will be stored (database, instances, logs, etc.)
DataDir string `yaml:"data_dir" json:"data_dir"`
Version string `yaml:"-" json:"version"`
CommitHash string `yaml:"-" json:"commit_hash"`
BuildTime string `yaml:"-" json:"build_time"`
}
// ServerConfig contains HTTP server configuration
type ServerConfig struct {
// Server host to bind to
Host string `yaml:"host" json:"host"`
// Server port to bind to
Port int `yaml:"port" json:"port"`
// Allowed origins for CORS (e.g., "http://localhost:3000")
AllowedOrigins []string `yaml:"allowed_origins" json:"allowed_origins"`
// Allowed headers for CORS (e.g., "Accept", "Authorization", "Content-Type", "X-CSRF-Token")
AllowedHeaders []string `yaml:"allowed_headers" json:"allowed_headers"`
// Enable Swagger UI for API documentation
EnableSwagger bool `yaml:"enable_swagger" json:"enable_swagger"`
// Response headers to send with responses
ResponseHeaders map[string]string `yaml:"response_headers,omitempty" json:"response_headers,omitempty"`
}
// DatabaseConfig contains database configuration settings
type DatabaseConfig struct {
// Database file path (relative to the top-level data_dir or absolute)
Path string `yaml:"path" json:"path"`
// Connection settings
MaxOpenConnections int `yaml:"max_open_connections" json:"max_open_connections"`
MaxIdleConnections int `yaml:"max_idle_connections" json:"max_idle_connections"`
ConnMaxLifetime time.Duration `yaml:"connection_max_lifetime" json:"connection_max_lifetime" swaggertype:"string" example:"1h"`
}
// InstancesConfig contains instance management configuration
type InstancesConfig struct {
// Port range for instances (e.g., 8000,9000)
PortRange [2]int `yaml:"port_range" json:"port_range"`
// Instance config directory override (relative to data_dir if not absolute)
InstancesDir string `yaml:"configs_dir" json:"configs_dir"`
// Logs directory override (relative to data_dir if not absolute)
LogsDir string `yaml:"logs_dir" json:"logs_dir"`
// Automatically create the data directory if it doesn't exist
AutoCreateDirs bool `yaml:"auto_create_dirs" json:"auto_create_dirs"`
// Maximum number of instances that can be created
MaxInstances int `yaml:"max_instances" json:"max_instances"`
// Maximum number of instances that can be running at the same time
MaxRunningInstances int `yaml:"max_running_instances,omitempty" json:"max_running_instances,omitempty"`
// Enable LRU eviction for instance logs
EnableLRUEviction bool `yaml:"enable_lru_eviction" json:"enable_lru_eviction"`
// Default auto-restart setting for new instances
DefaultAutoRestart bool `yaml:"default_auto_restart" json:"default_auto_restart"`
// Default max restarts for new instances
DefaultMaxRestarts int `yaml:"default_max_restarts" json:"default_max_restarts"`
// Default restart delay for new instances (in seconds)
DefaultRestartDelay int `yaml:"default_restart_delay" json:"default_restart_delay"`
// Default on-demand start setting for new instances
DefaultOnDemandStart bool `yaml:"default_on_demand_start" json:"default_on_demand_start"`
// How long to wait for an instance to start on demand (in seconds)
OnDemandStartTimeout int `yaml:"on_demand_start_timeout,omitempty" json:"on_demand_start_timeout,omitempty"`
// Interval for checking instance timeouts (in minutes)
TimeoutCheckInterval int `yaml:"timeout_check_interval" json:"timeout_check_interval"`
}
// AuthConfig contains authentication settings
type AuthConfig struct {
// Require authentication for OpenAI compatible inference endpoints
RequireInferenceAuth bool `yaml:"require_inference_auth" json:"require_inference_auth"`
// List of keys for OpenAI compatible inference endpoints
InferenceKeys []string `yaml:"inference_keys" json:"inference_keys"`
// Require authentication for management endpoints
RequireManagementAuth bool `yaml:"require_management_auth" json:"require_management_auth"`
// List of keys for management endpoints
ManagementKeys []string `yaml:"management_keys" json:"management_keys"`
}
type NodeConfig struct {
Address string `yaml:"address" json:"address"`
APIKey string `yaml:"api_key,omitempty" json:"api_key,omitempty"`
}
// LoadConfig loads configuration with the following precedence:
// 1. Hardcoded defaults
// 2. Config file
// 3. Environment variables
func LoadConfig(configPath string) (AppConfig, error) {
// 1. Start with defaults
defaultDataDir := getDefaultDataDirectory()
cfg := AppConfig{
Server: ServerConfig{
Host: "0.0.0.0",
Port: 8080,
AllowedOrigins: []string{"*"}, // Default to allow all origins
AllowedHeaders: []string{"*"}, // Default to allow all headers
EnableSwagger: false,
},
LocalNode: "main",
Nodes: map[string]NodeConfig{},
DataDir: defaultDataDir,
Backends: BackendConfig{
LlamaCpp: BackendSettings{
Command: "llama-server",
Args: []string{},
Environment: map[string]string{},
Docker: &DockerSettings{
Enabled: false,
Image: "ghcr.io/ggml-org/llama.cpp:server",
Args: []string{
"run", "--rm", "--network", "host", "--gpus", "all",
"-v", filepath.Join(defaultDataDir, "llama.cpp") + ":/root/.cache/llama.cpp"},
Environment: map[string]string{},
},
},
VLLM: BackendSettings{
Command: "vllm",
Args: []string{"serve"},
Docker: &DockerSettings{
Enabled: false,
Image: "vllm/vllm-openai:latest",
Args: []string{
"run", "--rm", "--network", "host", "--gpus", "all", "--shm-size", "1g",
"-v", filepath.Join(defaultDataDir, "huggingface") + ":/root/.cache/huggingface",
},
Environment: map[string]string{},
},
},
MLX: BackendSettings{
Command: "mlx_lm.server",
Args: []string{},
// No Docker section for MLX - not supported
},
},
Instances: InstancesConfig{
PortRange: [2]int{8000, 9000},
// NOTE: empty strings are set as placeholder values since InstancesDir and LogsDir
// should be relative path to DataDir if not explicitly set.
InstancesDir: "",
LogsDir: "",
AutoCreateDirs: true,
MaxInstances: -1, // -1 means unlimited
MaxRunningInstances: -1, // -1 means unlimited
EnableLRUEviction: true,
DefaultAutoRestart: true,
DefaultMaxRestarts: 3,
DefaultRestartDelay: 5,
DefaultOnDemandStart: true,
OnDemandStartTimeout: 120, // 2 minutes
TimeoutCheckInterval: 5, // Check timeouts every 5 minutes
},
Database: DatabaseConfig{
Path: "", // Will be set to data_dir/llamactl.db if empty
MaxOpenConnections: 25,
MaxIdleConnections: 5,
ConnMaxLifetime: 5 * time.Minute,
},
Auth: AuthConfig{
RequireInferenceAuth: true,
InferenceKeys: []string{},
RequireManagementAuth: true,
ManagementKeys: []string{},
},
}
+ defaultDataDir := getDefaultDataDir()
+ cfg := getDefaultConfig(defaultDataDir)
// 2. Load from config file
if err := loadConfigFile(&cfg, configPath); err != nil {
@@ -300,372 +85,6 @@ func loadConfigFile(cfg *AppConfig, configPath string) error {
return nil
}
// loadEnvVars overrides config with environment variables
func loadEnvVars(cfg *AppConfig) {
// Server config
if host := os.Getenv("LLAMACTL_HOST"); host != "" {
cfg.Server.Host = host
}
if port := os.Getenv("LLAMACTL_PORT"); port != "" {
if p, err := strconv.Atoi(port); err == nil {
cfg.Server.Port = p
}
}
if allowedOrigins := os.Getenv("LLAMACTL_ALLOWED_ORIGINS"); allowedOrigins != "" {
cfg.Server.AllowedOrigins = strings.Split(allowedOrigins, ",")
}
if enableSwagger := os.Getenv("LLAMACTL_ENABLE_SWAGGER"); enableSwagger != "" {
if b, err := strconv.ParseBool(enableSwagger); err == nil {
cfg.Server.EnableSwagger = b
}
}
// Data config
if dataDir := os.Getenv("LLAMACTL_DATA_DIRECTORY"); dataDir != "" {
cfg.DataDir = dataDir
}
if instancesDir := os.Getenv("LLAMACTL_INSTANCES_DIR"); instancesDir != "" {
cfg.Instances.InstancesDir = instancesDir
}
if logsDir := os.Getenv("LLAMACTL_LOGS_DIR"); logsDir != "" {
cfg.Instances.LogsDir = logsDir
}
if autoCreate := os.Getenv("LLAMACTL_AUTO_CREATE_DATA_DIR"); autoCreate != "" {
if b, err := strconv.ParseBool(autoCreate); err == nil {
cfg.Instances.AutoCreateDirs = b
}
}
// Instance config
if portRange := os.Getenv("LLAMACTL_INSTANCE_PORT_RANGE"); portRange != "" {
if ports := ParsePortRange(portRange); ports != [2]int{0, 0} {
cfg.Instances.PortRange = ports
}
}
if maxInstances := os.Getenv("LLAMACTL_MAX_INSTANCES"); maxInstances != "" {
if m, err := strconv.Atoi(maxInstances); err == nil {
cfg.Instances.MaxInstances = m
}
}
if maxRunning := os.Getenv("LLAMACTL_MAX_RUNNING_INSTANCES"); maxRunning != "" {
if m, err := strconv.Atoi(maxRunning); err == nil {
cfg.Instances.MaxRunningInstances = m
}
}
if enableLRUEviction := os.Getenv("LLAMACTL_ENABLE_LRU_EVICTION"); enableLRUEviction != "" {
if b, err := strconv.ParseBool(enableLRUEviction); err == nil {
cfg.Instances.EnableLRUEviction = b
}
}
// Backend config
// LlamaCpp backend
if llamaCmd := os.Getenv("LLAMACTL_LLAMACPP_COMMAND"); llamaCmd != "" {
cfg.Backends.LlamaCpp.Command = llamaCmd
}
if llamaArgs := os.Getenv("LLAMACTL_LLAMACPP_ARGS"); llamaArgs != "" {
cfg.Backends.LlamaCpp.Args = strings.Split(llamaArgs, " ")
}
if llamaEnv := os.Getenv("LLAMACTL_LLAMACPP_ENV"); llamaEnv != "" {
if cfg.Backends.LlamaCpp.Environment == nil {
cfg.Backends.LlamaCpp.Environment = make(map[string]string)
}
parseEnvVars(llamaEnv, cfg.Backends.LlamaCpp.Environment)
}
if llamaDockerEnabled := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ENABLED"); llamaDockerEnabled != "" {
if b, err := strconv.ParseBool(llamaDockerEnabled); err == nil {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Enabled = b
}
}
if llamaDockerImage := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_IMAGE"); llamaDockerImage != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Image = llamaDockerImage
}
if llamaDockerArgs := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ARGS"); llamaDockerArgs != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Args = strings.Split(llamaDockerArgs, " ")
}
if llamaDockerEnv := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ENV"); llamaDockerEnv != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
if cfg.Backends.LlamaCpp.Docker.Environment == nil {
cfg.Backends.LlamaCpp.Docker.Environment = make(map[string]string)
}
parseEnvVars(llamaDockerEnv, cfg.Backends.LlamaCpp.Docker.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_LLAMACPP_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.LlamaCpp.ResponseHeaders == nil {
cfg.Backends.LlamaCpp.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.LlamaCpp.ResponseHeaders)
}
// vLLM backend
if vllmCmd := os.Getenv("LLAMACTL_VLLM_COMMAND"); vllmCmd != "" {
cfg.Backends.VLLM.Command = vllmCmd
}
if vllmArgs := os.Getenv("LLAMACTL_VLLM_ARGS"); vllmArgs != "" {
cfg.Backends.VLLM.Args = strings.Split(vllmArgs, " ")
}
if vllmEnv := os.Getenv("LLAMACTL_VLLM_ENV"); vllmEnv != "" {
if cfg.Backends.VLLM.Environment == nil {
cfg.Backends.VLLM.Environment = make(map[string]string)
}
parseEnvVars(vllmEnv, cfg.Backends.VLLM.Environment)
}
if vllmDockerEnabled := os.Getenv("LLAMACTL_VLLM_DOCKER_ENABLED"); vllmDockerEnabled != "" {
if b, err := strconv.ParseBool(vllmDockerEnabled); err == nil {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Enabled = b
}
}
if vllmDockerImage := os.Getenv("LLAMACTL_VLLM_DOCKER_IMAGE"); vllmDockerImage != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Image = vllmDockerImage
}
if vllmDockerArgs := os.Getenv("LLAMACTL_VLLM_DOCKER_ARGS"); vllmDockerArgs != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Args = strings.Split(vllmDockerArgs, " ")
}
if vllmDockerEnv := os.Getenv("LLAMACTL_VLLM_DOCKER_ENV"); vllmDockerEnv != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
if cfg.Backends.VLLM.Docker.Environment == nil {
cfg.Backends.VLLM.Docker.Environment = make(map[string]string)
}
parseEnvVars(vllmDockerEnv, cfg.Backends.VLLM.Docker.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_VLLM_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.VLLM.ResponseHeaders == nil {
cfg.Backends.VLLM.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.VLLM.ResponseHeaders)
}
// MLX backend
if mlxCmd := os.Getenv("LLAMACTL_MLX_COMMAND"); mlxCmd != "" {
cfg.Backends.MLX.Command = mlxCmd
}
if mlxArgs := os.Getenv("LLAMACTL_MLX_ARGS"); mlxArgs != "" {
cfg.Backends.MLX.Args = strings.Split(mlxArgs, " ")
}
if mlxEnv := os.Getenv("LLAMACTL_MLX_ENV"); mlxEnv != "" {
if cfg.Backends.MLX.Environment == nil {
cfg.Backends.MLX.Environment = make(map[string]string)
}
parseEnvVars(mlxEnv, cfg.Backends.MLX.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_MLX_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.MLX.ResponseHeaders == nil {
cfg.Backends.MLX.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.MLX.ResponseHeaders)
}
// Instance defaults
if autoRestart := os.Getenv("LLAMACTL_DEFAULT_AUTO_RESTART"); autoRestart != "" {
if b, err := strconv.ParseBool(autoRestart); err == nil {
cfg.Instances.DefaultAutoRestart = b
}
}
if maxRestarts := os.Getenv("LLAMACTL_DEFAULT_MAX_RESTARTS"); maxRestarts != "" {
if m, err := strconv.Atoi(maxRestarts); err == nil {
cfg.Instances.DefaultMaxRestarts = m
}
}
if restartDelay := os.Getenv("LLAMACTL_DEFAULT_RESTART_DELAY"); restartDelay != "" {
if seconds, err := strconv.Atoi(restartDelay); err == nil {
cfg.Instances.DefaultRestartDelay = seconds
}
}
if onDemandStart := os.Getenv("LLAMACTL_DEFAULT_ON_DEMAND_START"); onDemandStart != "" {
if b, err := strconv.ParseBool(onDemandStart); err == nil {
cfg.Instances.DefaultOnDemandStart = b
}
}
if onDemandTimeout := os.Getenv("LLAMACTL_ON_DEMAND_START_TIMEOUT"); onDemandTimeout != "" {
if seconds, err := strconv.Atoi(onDemandTimeout); err == nil {
cfg.Instances.OnDemandStartTimeout = seconds
}
}
if timeoutCheckInterval := os.Getenv("LLAMACTL_TIMEOUT_CHECK_INTERVAL"); timeoutCheckInterval != "" {
if minutes, err := strconv.Atoi(timeoutCheckInterval); err == nil {
cfg.Instances.TimeoutCheckInterval = minutes
}
}
// Auth config
if requireInferenceAuth := os.Getenv("LLAMACTL_REQUIRE_INFERENCE_AUTH"); requireInferenceAuth != "" {
if b, err := strconv.ParseBool(requireInferenceAuth); err == nil {
cfg.Auth.RequireInferenceAuth = b
}
}
if inferenceKeys := os.Getenv("LLAMACTL_INFERENCE_KEYS"); inferenceKeys != "" {
cfg.Auth.InferenceKeys = strings.Split(inferenceKeys, ",")
}
if requireManagementAuth := os.Getenv("LLAMACTL_REQUIRE_MANAGEMENT_AUTH"); requireManagementAuth != "" {
if b, err := strconv.ParseBool(requireManagementAuth); err == nil {
cfg.Auth.RequireManagementAuth = b
}
}
if managementKeys := os.Getenv("LLAMACTL_MANAGEMENT_KEYS"); managementKeys != "" {
cfg.Auth.ManagementKeys = strings.Split(managementKeys, ",")
}
// Local node config
if localNode := os.Getenv("LLAMACTL_LOCAL_NODE"); localNode != "" {
cfg.LocalNode = localNode
}
// Database config
if dbPath := os.Getenv("LLAMACTL_DATABASE_PATH"); dbPath != "" {
cfg.Database.Path = dbPath
}
if maxOpenConns := os.Getenv("LLAMACTL_DATABASE_MAX_OPEN_CONNECTIONS"); maxOpenConns != "" {
if m, err := strconv.Atoi(maxOpenConns); err == nil {
cfg.Database.MaxOpenConnections = m
}
}
if maxIdleConns := os.Getenv("LLAMACTL_DATABASE_MAX_IDLE_CONNECTIONS"); maxIdleConns != "" {
if m, err := strconv.Atoi(maxIdleConns); err == nil {
cfg.Database.MaxIdleConnections = m
}
}
if connMaxLifetime := os.Getenv("LLAMACTL_DATABASE_CONN_MAX_LIFETIME"); connMaxLifetime != "" {
if d, err := time.ParseDuration(connMaxLifetime); err == nil {
cfg.Database.ConnMaxLifetime = d
}
}
}
// ParsePortRange parses port range from string formats like "8000-9000" or "8000,9000"
func ParsePortRange(s string) [2]int {
var parts []string
// Try both separators
if strings.Contains(s, "-") {
parts = strings.Split(s, "-")
} else if strings.Contains(s, ",") {
parts = strings.Split(s, ",")
}
// Parse the two parts
if len(parts) == 2 {
start, err1 := strconv.Atoi(strings.TrimSpace(parts[0]))
end, err2 := strconv.Atoi(strings.TrimSpace(parts[1]))
if err1 == nil && err2 == nil {
return [2]int{start, end}
}
}
return [2]int{0, 0} // Invalid format
}
// parseEnvVars parses environment variables in format "KEY1=value1,KEY2=value2"
// and populates the provided environment map
func parseEnvVars(envString string, envMap map[string]string) {
if envString == "" {
return
}
for _, envPair := range strings.Split(envString, ",") {
if parts := strings.SplitN(strings.TrimSpace(envPair), "=", 2); len(parts) == 2 {
envMap[parts[0]] = parts[1]
}
}
}
// parseHeaders parses HTTP headers in format "KEY1=value1;KEY2=value2"
// and populates the provided environment map
func parseHeaders(envString string, envMap map[string]string) {
if envString == "" {
return
}
for _, envPair := range strings.Split(envString, ";") {
if parts := strings.SplitN(strings.TrimSpace(envPair), "=", 2); len(parts) == 2 {
envMap[parts[0]] = parts[1]
}
}
}
// getDefaultDataDirectory returns platform-specific default data directory
func getDefaultDataDirectory() string {
switch runtime.GOOS {
case "windows":
// Try PROGRAMDATA first (system-wide), fallback to LOCALAPPDATA (user)
if programData := os.Getenv("PROGRAMDATA"); programData != "" {
return filepath.Join(programData, "llamactl")
}
if localAppData := os.Getenv("LOCALAPPDATA"); localAppData != "" {
return filepath.Join(localAppData, "llamactl")
}
return "C:\\ProgramData\\llamactl" // Final fallback
case "darwin":
// For macOS, use user's Application Support directory
if homeDir, _ := os.UserHomeDir(); homeDir != "" {
return filepath.Join(homeDir, "Library", "Application Support", "llamactl")
}
return "/usr/local/var/llamactl" // Fallback
default:
// Linux and other Unix-like systems
if homeDir, _ := os.UserHomeDir(); homeDir != "" {
return filepath.Join(homeDir, ".local", "share", "llamactl")
}
return "/var/lib/llamactl" // Final fallback
}
}
// getDefaultConfigLocations returns platform-specific config file locations
func getDefaultConfigLocations() []string {
var locations []string
// Use ./llamactl.yaml and ./config.yaml as the default config file
locations = append(locations, "llamactl.yaml")
locations = append(locations, "config.yaml")
homeDir, _ := os.UserHomeDir()
switch runtime.GOOS {
case "windows":
// Windows: Use APPDATA if available, else user home, fallback to ProgramData
if appData := os.Getenv("APPDATA"); appData != "" {
locations = append(locations, filepath.Join(appData, "llamactl", "config.yaml"))
} else if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, "llamactl", "config.yaml"))
}
locations = append(locations, filepath.Join(os.Getenv("PROGRAMDATA"), "llamactl", "config.yaml"))
case "darwin":
// macOS: Use Application Support in user home, fallback to /Library/Application Support
if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, "Library", "Application Support", "llamactl", "config.yaml"))
}
locations = append(locations, "/Library/Application Support/llamactl/config.yaml")
default:
// Linux/Unix: Use ~/.config/llamactl/config.yaml, fallback to /etc/llamactl/config.yaml
if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, ".config", "llamactl", "config.yaml"))
}
locations = append(locations, "/etc/llamactl/config.yaml")
}
return locations
}
// SanitizedCopy returns a copy of the AppConfig with sensitive information removed
func (cfg *AppConfig) SanitizedCopy() (AppConfig, error) {
// Deep copy via JSON marshal/unmarshal to avoid concurrent map access


@@ -78,8 +78,8 @@ server:
port: 9090
instances:
port_range: [7000, 8000]
- logs_dir: "/custom/logs"
max_instances: 5
+ logs_dir: "/custom/logs"
llama_executable: "/usr/bin/llama-server"
default_auto_restart: false
default_max_restarts: 10
@@ -219,7 +219,6 @@ instances:
}
}
func TestParsePortRange(t *testing.T) {
tests := []struct {
name string
@@ -248,7 +247,6 @@ func TestParsePortRange(t *testing.T) {
}
}
func TestGetBackendSettings_NewStructuredConfig(t *testing.T) {
bc := &config.BackendConfig{
LlamaCpp: config.BackendSettings{
@@ -305,7 +303,6 @@ func TestGetBackendSettings_NewStructuredConfig(t *testing.T) {
}
}
func TestLoadConfig_BackendEnvironmentVariables(t *testing.T) {
// Test that backend environment variables work correctly
envVars := map[string]string{
@@ -375,7 +372,6 @@ func TestLoadConfig_BackendEnvironmentVariables(t *testing.T) {
}
}
func TestLoadConfig_LocalNode(t *testing.T) {
t.Run("default local node", func(t *testing.T) {
cfg, err := config.LoadConfig("nonexistent-file.yaml")

pkg/config/defaults.go (new file)

@@ -0,0 +1,154 @@
package config
import (
"os"
"path/filepath"
"runtime"
"time"
)
func getDefaultConfig(dataDir string) AppConfig {
return AppConfig{
Server: ServerConfig{
Host: "0.0.0.0",
Port: 8080,
AllowedOrigins: []string{"*"}, // Default to allow all origins
AllowedHeaders: []string{"*"}, // Default to allow all headers
EnableSwagger: false,
},
LocalNode: "main",
Nodes: map[string]NodeConfig{},
DataDir: dataDir,
Backends: BackendConfig{
LlamaCpp: BackendSettings{
Command: "llama-server",
Args: []string{},
Environment: map[string]string{},
Docker: &DockerSettings{
Enabled: false,
Image: "ghcr.io/ggml-org/llama.cpp:server",
Args: []string{
"run", "--rm", "--network", "host", "--gpus", "all",
"-v", filepath.Join(dataDir, "llama.cpp") + ":/root/.cache/llama.cpp"},
Environment: map[string]string{},
},
},
VLLM: BackendSettings{
Command: "vllm",
Args: []string{"serve"},
Docker: &DockerSettings{
Enabled: false,
Image: "vllm/vllm-openai:latest",
Args: []string{
"run", "--rm", "--network", "host", "--gpus", "all", "--shm-size", "1g",
"-v", filepath.Join(dataDir, "huggingface") + ":/root/.cache/huggingface",
},
Environment: map[string]string{},
},
},
MLX: BackendSettings{
Command: "mlx_lm.server",
Args: []string{},
// No Docker section for MLX - not supported
},
},
Instances: InstancesConfig{
PortRange: [2]int{8000, 9000},
// NOTE: empty string is set as placeholder value since InstancesDir
// should be relative path to DataDir if not explicitly set.
InstancesDir: "",
AutoCreateDirs: true,
MaxInstances: -1, // -1 means unlimited
MaxRunningInstances: -1, // -1 means unlimited
EnableLRUEviction: true,
DefaultAutoRestart: true,
DefaultMaxRestarts: 3,
DefaultRestartDelay: 5,
DefaultOnDemandStart: true,
OnDemandStartTimeout: 120, // 2 minutes
TimeoutCheckInterval: 5, // Check timeouts every 5 minutes
LogsDir: "", // Will be set to data_dir/logs if empty
LogRotationEnabled: true,
LogRotationMaxSize: 100,
LogRotationCompress: false,
},
Database: DatabaseConfig{
Path: "", // Will be set to data_dir/llamactl.db if empty
MaxOpenConnections: 25,
MaxIdleConnections: 5,
ConnMaxLifetime: 5 * time.Minute,
},
Auth: AuthConfig{
RequireInferenceAuth: true,
InferenceKeys: []string{},
RequireManagementAuth: true,
ManagementKeys: []string{},
},
}
}
// getDefaultDataDir returns platform-specific default data directory
func getDefaultDataDir() string {
switch runtime.GOOS {
case "windows":
// Try PROGRAMDATA first (system-wide), fallback to LOCALAPPDATA (user)
if programData := os.Getenv("PROGRAMDATA"); programData != "" {
return filepath.Join(programData, "llamactl")
}
if localAppData := os.Getenv("LOCALAPPDATA"); localAppData != "" {
return filepath.Join(localAppData, "llamactl")
}
return "C:\\ProgramData\\llamactl" // Final fallback
case "darwin":
// For macOS, use user's Application Support directory
if homeDir, _ := os.UserHomeDir(); homeDir != "" {
return filepath.Join(homeDir, "Library", "Application Support", "llamactl")
}
return "/usr/local/var/llamactl" // Fallback
default:
// Linux and other Unix-like systems
if homeDir, _ := os.UserHomeDir(); homeDir != "" {
return filepath.Join(homeDir, ".local", "share", "llamactl")
}
return "/var/lib/llamactl" // Final fallback
}
}
// getDefaultConfigLocations returns platform-specific config file locations
func getDefaultConfigLocations() []string {
var locations []string
// Use ./llamactl.yaml and ./config.yaml as the default config file
locations = append(locations, "llamactl.yaml")
locations = append(locations, "config.yaml")
homeDir, _ := os.UserHomeDir()
switch runtime.GOOS {
case "windows":
// Windows: Use APPDATA if available, else user home, fallback to ProgramData
if appData := os.Getenv("APPDATA"); appData != "" {
locations = append(locations, filepath.Join(appData, "llamactl", "config.yaml"))
} else if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, "llamactl", "config.yaml"))
}
locations = append(locations, filepath.Join(os.Getenv("PROGRAMDATA"), "llamactl", "config.yaml"))
case "darwin":
// macOS: Use Application Support in user home, fallback to /Library/Application Support
if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, "Library", "Application Support", "llamactl", "config.yaml"))
}
locations = append(locations, "/Library/Application Support/llamactl/config.yaml")
default:
// Linux/Unix: Use ~/.config/llamactl/config.yaml, fallback to /etc/llamactl/config.yaml
if homeDir != "" {
locations = append(locations, filepath.Join(homeDir, ".config", "llamactl", "config.yaml"))
}
locations = append(locations, "/etc/llamactl/config.yaml")
}
return locations
}

pkg/config/env.go (new file)

@@ -0,0 +1,325 @@
package config
import (
"os"
"strconv"
"strings"
"time"
)
// loadEnvVars overrides config with environment variables
func loadEnvVars(cfg *AppConfig) {
// Server config
if host := os.Getenv("LLAMACTL_HOST"); host != "" {
cfg.Server.Host = host
}
if port := os.Getenv("LLAMACTL_PORT"); port != "" {
if p, err := strconv.Atoi(port); err == nil {
cfg.Server.Port = p
}
}
if allowedOrigins := os.Getenv("LLAMACTL_ALLOWED_ORIGINS"); allowedOrigins != "" {
cfg.Server.AllowedOrigins = strings.Split(allowedOrigins, ",")
}
if enableSwagger := os.Getenv("LLAMACTL_ENABLE_SWAGGER"); enableSwagger != "" {
if b, err := strconv.ParseBool(enableSwagger); err == nil {
cfg.Server.EnableSwagger = b
}
}
// Data config
if dataDir := os.Getenv("LLAMACTL_DATA_DIRECTORY"); dataDir != "" {
cfg.DataDir = dataDir
}
if instancesDir := os.Getenv("LLAMACTL_INSTANCES_DIR"); instancesDir != "" {
cfg.Instances.InstancesDir = instancesDir
}
if logsDir := os.Getenv("LLAMACTL_LOGS_DIR"); logsDir != "" {
cfg.Instances.LogsDir = logsDir
}
if autoCreate := os.Getenv("LLAMACTL_AUTO_CREATE_DATA_DIR"); autoCreate != "" {
if b, err := strconv.ParseBool(autoCreate); err == nil {
cfg.Instances.AutoCreateDirs = b
}
}
// Instance config
if portRange := os.Getenv("LLAMACTL_INSTANCE_PORT_RANGE"); portRange != "" {
if ports := ParsePortRange(portRange); ports != [2]int{0, 0} {
cfg.Instances.PortRange = ports
}
}
if maxInstances := os.Getenv("LLAMACTL_MAX_INSTANCES"); maxInstances != "" {
if m, err := strconv.Atoi(maxInstances); err == nil {
cfg.Instances.MaxInstances = m
}
}
if maxRunning := os.Getenv("LLAMACTL_MAX_RUNNING_INSTANCES"); maxRunning != "" {
if m, err := strconv.Atoi(maxRunning); err == nil {
cfg.Instances.MaxRunningInstances = m
}
}
if enableLRUEviction := os.Getenv("LLAMACTL_ENABLE_LRU_EVICTION"); enableLRUEviction != "" {
if b, err := strconv.ParseBool(enableLRUEviction); err == nil {
cfg.Instances.EnableLRUEviction = b
}
}
// Backend config
// LlamaCpp backend
if llamaCmd := os.Getenv("LLAMACTL_LLAMACPP_COMMAND"); llamaCmd != "" {
cfg.Backends.LlamaCpp.Command = llamaCmd
}
if llamaArgs := os.Getenv("LLAMACTL_LLAMACPP_ARGS"); llamaArgs != "" {
cfg.Backends.LlamaCpp.Args = strings.Split(llamaArgs, " ")
}
if llamaEnv := os.Getenv("LLAMACTL_LLAMACPP_ENV"); llamaEnv != "" {
if cfg.Backends.LlamaCpp.Environment == nil {
cfg.Backends.LlamaCpp.Environment = make(map[string]string)
}
parseEnvVars(llamaEnv, cfg.Backends.LlamaCpp.Environment)
}
if llamaDockerEnabled := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ENABLED"); llamaDockerEnabled != "" {
if b, err := strconv.ParseBool(llamaDockerEnabled); err == nil {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Enabled = b
}
}
if llamaDockerImage := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_IMAGE"); llamaDockerImage != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Image = llamaDockerImage
}
if llamaDockerArgs := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ARGS"); llamaDockerArgs != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
cfg.Backends.LlamaCpp.Docker.Args = strings.Split(llamaDockerArgs, " ")
}
if llamaDockerEnv := os.Getenv("LLAMACTL_LLAMACPP_DOCKER_ENV"); llamaDockerEnv != "" {
if cfg.Backends.LlamaCpp.Docker == nil {
cfg.Backends.LlamaCpp.Docker = &DockerSettings{}
}
if cfg.Backends.LlamaCpp.Docker.Environment == nil {
cfg.Backends.LlamaCpp.Docker.Environment = make(map[string]string)
}
parseEnvVars(llamaDockerEnv, cfg.Backends.LlamaCpp.Docker.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_LLAMACPP_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.LlamaCpp.ResponseHeaders == nil {
cfg.Backends.LlamaCpp.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.LlamaCpp.ResponseHeaders)
}
// vLLM backend
if vllmCmd := os.Getenv("LLAMACTL_VLLM_COMMAND"); vllmCmd != "" {
cfg.Backends.VLLM.Command = vllmCmd
}
if vllmArgs := os.Getenv("LLAMACTL_VLLM_ARGS"); vllmArgs != "" {
cfg.Backends.VLLM.Args = strings.Split(vllmArgs, " ")
}
if vllmEnv := os.Getenv("LLAMACTL_VLLM_ENV"); vllmEnv != "" {
if cfg.Backends.VLLM.Environment == nil {
cfg.Backends.VLLM.Environment = make(map[string]string)
}
parseEnvVars(vllmEnv, cfg.Backends.VLLM.Environment)
}
if vllmDockerEnabled := os.Getenv("LLAMACTL_VLLM_DOCKER_ENABLED"); vllmDockerEnabled != "" {
if b, err := strconv.ParseBool(vllmDockerEnabled); err == nil {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Enabled = b
}
}
if vllmDockerImage := os.Getenv("LLAMACTL_VLLM_DOCKER_IMAGE"); vllmDockerImage != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Image = vllmDockerImage
}
if vllmDockerArgs := os.Getenv("LLAMACTL_VLLM_DOCKER_ARGS"); vllmDockerArgs != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
cfg.Backends.VLLM.Docker.Args = strings.Split(vllmDockerArgs, " ")
}
if vllmDockerEnv := os.Getenv("LLAMACTL_VLLM_DOCKER_ENV"); vllmDockerEnv != "" {
if cfg.Backends.VLLM.Docker == nil {
cfg.Backends.VLLM.Docker = &DockerSettings{}
}
if cfg.Backends.VLLM.Docker.Environment == nil {
cfg.Backends.VLLM.Docker.Environment = make(map[string]string)
}
parseEnvVars(vllmDockerEnv, cfg.Backends.VLLM.Docker.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_VLLM_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.VLLM.ResponseHeaders == nil {
cfg.Backends.VLLM.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.VLLM.ResponseHeaders)
}
// MLX backend
if mlxCmd := os.Getenv("LLAMACTL_MLX_COMMAND"); mlxCmd != "" {
cfg.Backends.MLX.Command = mlxCmd
}
if mlxArgs := os.Getenv("LLAMACTL_MLX_ARGS"); mlxArgs != "" {
cfg.Backends.MLX.Args = strings.Split(mlxArgs, " ")
}
if mlxEnv := os.Getenv("LLAMACTL_MLX_ENV"); mlxEnv != "" {
if cfg.Backends.MLX.Environment == nil {
cfg.Backends.MLX.Environment = make(map[string]string)
}
parseEnvVars(mlxEnv, cfg.Backends.MLX.Environment)
}
if llamaEnv := os.Getenv("LLAMACTL_MLX_RESPONSE_HEADERS"); llamaEnv != "" {
if cfg.Backends.MLX.ResponseHeaders == nil {
cfg.Backends.MLX.ResponseHeaders = make(map[string]string)
}
parseHeaders(llamaEnv, cfg.Backends.MLX.ResponseHeaders)
}
// Instance defaults
if autoRestart := os.Getenv("LLAMACTL_DEFAULT_AUTO_RESTART"); autoRestart != "" {
if b, err := strconv.ParseBool(autoRestart); err == nil {
cfg.Instances.DefaultAutoRestart = b
}
}
if maxRestarts := os.Getenv("LLAMACTL_DEFAULT_MAX_RESTARTS"); maxRestarts != "" {
if m, err := strconv.Atoi(maxRestarts); err == nil {
cfg.Instances.DefaultMaxRestarts = m
}
}
if restartDelay := os.Getenv("LLAMACTL_DEFAULT_RESTART_DELAY"); restartDelay != "" {
if seconds, err := strconv.Atoi(restartDelay); err == nil {
cfg.Instances.DefaultRestartDelay = seconds
}
}
if onDemandStart := os.Getenv("LLAMACTL_DEFAULT_ON_DEMAND_START"); onDemandStart != "" {
if b, err := strconv.ParseBool(onDemandStart); err == nil {
cfg.Instances.DefaultOnDemandStart = b
}
}
if onDemandTimeout := os.Getenv("LLAMACTL_ON_DEMAND_START_TIMEOUT"); onDemandTimeout != "" {
if seconds, err := strconv.Atoi(onDemandTimeout); err == nil {
cfg.Instances.OnDemandStartTimeout = seconds
}
}
if timeoutCheckInterval := os.Getenv("LLAMACTL_TIMEOUT_CHECK_INTERVAL"); timeoutCheckInterval != "" {
if minutes, err := strconv.Atoi(timeoutCheckInterval); err == nil {
cfg.Instances.TimeoutCheckInterval = minutes
}
}
// Auth config
if requireInferenceAuth := os.Getenv("LLAMACTL_REQUIRE_INFERENCE_AUTH"); requireInferenceAuth != "" {
if b, err := strconv.ParseBool(requireInferenceAuth); err == nil {
cfg.Auth.RequireInferenceAuth = b
}
}
if inferenceKeys := os.Getenv("LLAMACTL_INFERENCE_KEYS"); inferenceKeys != "" {
cfg.Auth.InferenceKeys = strings.Split(inferenceKeys, ",")
}
if requireManagementAuth := os.Getenv("LLAMACTL_REQUIRE_MANAGEMENT_AUTH"); requireManagementAuth != "" {
if b, err := strconv.ParseBool(requireManagementAuth); err == nil {
cfg.Auth.RequireManagementAuth = b
}
}
if managementKeys := os.Getenv("LLAMACTL_MANAGEMENT_KEYS"); managementKeys != "" {
cfg.Auth.ManagementKeys = strings.Split(managementKeys, ",")
}
// Local node config
if localNode := os.Getenv("LLAMACTL_LOCAL_NODE"); localNode != "" {
cfg.LocalNode = localNode
}
// Database config
if dbPath := os.Getenv("LLAMACTL_DATABASE_PATH"); dbPath != "" {
cfg.Database.Path = dbPath
}
if maxOpenConns := os.Getenv("LLAMACTL_DATABASE_MAX_OPEN_CONNECTIONS"); maxOpenConns != "" {
if m, err := strconv.Atoi(maxOpenConns); err == nil {
cfg.Database.MaxOpenConnections = m
}
}
if maxIdleConns := os.Getenv("LLAMACTL_DATABASE_MAX_IDLE_CONNECTIONS"); maxIdleConns != "" {
if m, err := strconv.Atoi(maxIdleConns); err == nil {
cfg.Database.MaxIdleConnections = m
}
}
if connMaxLifetime := os.Getenv("LLAMACTL_DATABASE_CONN_MAX_LIFETIME"); connMaxLifetime != "" {
if d, err := time.ParseDuration(connMaxLifetime); err == nil {
cfg.Database.ConnMaxLifetime = d
}
}
// Log rotation config
if logRotationEnabled := os.Getenv("LLAMACTL_LOG_ROTATION_ENABLED"); logRotationEnabled != "" {
if b, err := strconv.ParseBool(logRotationEnabled); err == nil {
cfg.Instances.LogRotationEnabled = b
}
}
if logRotationMaxSize := os.Getenv("LLAMACTL_LOG_ROTATION_MAX_SIZE"); logRotationMaxSize != "" {
if m, err := strconv.Atoi(logRotationMaxSize); err == nil {
cfg.Instances.LogRotationMaxSize = m
}
}
if logRotationCompress := os.Getenv("LLAMACTL_LOG_ROTATION_COMPRESS"); logRotationCompress != "" {
if b, err := strconv.ParseBool(logRotationCompress); err == nil {
cfg.Instances.LogRotationCompress = b
}
}
}
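// Note (editorial, not in the diff): every override above is best-effort.
// A value that fails to parse, e.g. LLAMACTL_LOG_ROTATION_MAX_SIZE=abc,
// is silently ignored and the previously loaded value is kept.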
// ParsePortRange parses port range from string formats like "8000-9000" or "8000,9000"
func ParsePortRange(s string) [2]int {
var parts []string
// Try both separators
if strings.Contains(s, "-") {
parts = strings.Split(s, "-")
} else if strings.Contains(s, ",") {
parts = strings.Split(s, ",")
}
// Parse the two parts
if len(parts) == 2 {
start, err1 := strconv.Atoi(strings.TrimSpace(parts[0]))
end, err2 := strconv.Atoi(strings.TrimSpace(parts[1]))
if err1 == nil && err2 == nil {
return [2]int{start, end}
}
}
return [2]int{0, 0} // Invalid format
}
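// Example (editorial, not in the diff): both accepted formats parse to the
// same range, and anything else yields the [2]int{0, 0} sentinel:
//
//	ParsePortRange("8000-9000") // [2]int{8000, 9000}
//	ParsePortRange("8000,9000") // [2]int{8000, 9000}
//	ParsePortRange("9000")      // [2]int{0, 0} (invalid: no separator)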
// parseEnvVars parses environment variables in format "KEY1=value1,KEY2=value2"
// and populates the provided environment map
func parseEnvVars(envString string, envMap map[string]string) {
if envString == "" {
return
}
for _, envPair := range strings.Split(envString, ",") {
if parts := strings.SplitN(strings.TrimSpace(envPair), "=", 2); len(parts) == 2 {
envMap[parts[0]] = parts[1]
}
}
}
// parseHeaders parses HTTP headers in format "KEY1=value1;KEY2=value2"
// and populates the provided environment map
func parseHeaders(envString string, envMap map[string]string) {
if envString == "" {
return
}
for _, envPair := range strings.Split(envString, ";") {
if parts := strings.SplitN(strings.TrimSpace(envPair), "=", 2); len(parts) == 2 {
envMap[parts[0]] = parts[1]
}
}
}
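// Example (editorial, not in the diff): the pair separators differ, so
// header values may contain commas while env var values may not:
//
//	env := map[string]string{}
//	parseEnvVars("CUDA_VISIBLE_DEVICES=0,OMP_NUM_THREADS=4", env)
//	// env["CUDA_VISIBLE_DEVICES"] == "0", env["OMP_NUM_THREADS"] == "4"
//
//	hdrs := map[string]string{}
//	parseHeaders("Cache-Control=no-store;X-Backend=llama-cpp", hdrs)
//	// hdrs["Cache-Control"] == "no-store" (header names here are illustrative)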

pkg/config/types.go (new file)

@@ -0,0 +1,149 @@
package config
import "time"
// BackendSettings contains structured backend configuration
type BackendSettings struct {
Command string `yaml:"command" json:"command"`
Args []string `yaml:"args" json:"args"`
Environment map[string]string `yaml:"environment,omitempty" json:"environment,omitempty"`
Docker *DockerSettings `yaml:"docker,omitempty" json:"docker,omitempty"`
ResponseHeaders map[string]string `yaml:"response_headers,omitempty" json:"response_headers,omitempty"`
}
// DockerSettings contains Docker-specific configuration
type DockerSettings struct {
Enabled bool `yaml:"enabled" json:"enabled"`
Image string `yaml:"image" json:"image"`
Args []string `yaml:"args" json:"args"`
Environment map[string]string `yaml:"environment,omitempty" json:"environment,omitempty"`
}
// BackendConfig contains backend executable configurations
type BackendConfig struct {
LlamaCpp BackendSettings `yaml:"llama-cpp" json:"llama-cpp"`
VLLM BackendSettings `yaml:"vllm" json:"vllm"`
MLX BackendSettings `yaml:"mlx" json:"mlx"`
}
// AppConfig represents the configuration for llamactl
type AppConfig struct {
Server ServerConfig `yaml:"server" json:"server"`
Backends BackendConfig `yaml:"backends" json:"backends"`
Instances InstancesConfig `yaml:"instances" json:"instances"`
Database DatabaseConfig `yaml:"database" json:"database"`
Auth AuthConfig `yaml:"auth" json:"auth"`
LocalNode string `yaml:"local_node,omitempty" json:"local_node,omitempty"`
Nodes map[string]NodeConfig `yaml:"nodes,omitempty" json:"nodes,omitempty"`
// Directory where all llamactl data will be stored (database, instances, logs, etc.)
DataDir string `yaml:"data_dir" json:"data_dir"`
Version string `yaml:"-" json:"version"`
CommitHash string `yaml:"-" json:"commit_hash"`
BuildTime string `yaml:"-" json:"build_time"`
}
// ServerConfig contains HTTP server configuration
type ServerConfig struct {
// Server host to bind to
Host string `yaml:"host" json:"host"`
// Server port to bind to
Port int `yaml:"port" json:"port"`
// Allowed origins for CORS (e.g., "http://localhost:3000")
AllowedOrigins []string `yaml:"allowed_origins" json:"allowed_origins"`
// Allowed headers for CORS (e.g., "Accept", "Authorization", "Content-Type", "X-CSRF-Token")
AllowedHeaders []string `yaml:"allowed_headers" json:"allowed_headers"`
// Enable Swagger UI for API documentation
EnableSwagger bool `yaml:"enable_swagger" json:"enable_swagger"`
// Response headers to send with responses
ResponseHeaders map[string]string `yaml:"response_headers,omitempty" json:"response_headers,omitempty"`
}
// DatabaseConfig contains database configuration settings
type DatabaseConfig struct {
// Database file path (relative to the top-level data_dir or absolute)
Path string `yaml:"path" json:"path"`
// Connection settings
MaxOpenConnections int `yaml:"max_open_connections" json:"max_open_connections"`
MaxIdleConnections int `yaml:"max_idle_connections" json:"max_idle_connections"`
ConnMaxLifetime time.Duration `yaml:"connection_max_lifetime" json:"connection_max_lifetime" swaggertype:"string" example:"1h"`
}
// InstancesConfig contains instance management configuration
type InstancesConfig struct {
// Port range for instances (e.g., 8000,9000)
PortRange [2]int `yaml:"port_range" json:"port_range"`
// Instance config directory override (relative to data_dir if not absolute)
InstancesDir string `yaml:"configs_dir" json:"configs_dir"`
// Automatically create the data directory if it doesn't exist
AutoCreateDirs bool `yaml:"auto_create_dirs" json:"auto_create_dirs"`
// Maximum number of instances that can be created
MaxInstances int `yaml:"max_instances" json:"max_instances"`
// Maximum number of instances that can be running at the same time
MaxRunningInstances int `yaml:"max_running_instances,omitempty" json:"max_running_instances,omitempty"`
// Enable LRU eviction for instance logs
EnableLRUEviction bool `yaml:"enable_lru_eviction" json:"enable_lru_eviction"`
// Default auto-restart setting for new instances
DefaultAutoRestart bool `yaml:"default_auto_restart" json:"default_auto_restart"`
// Default max restarts for new instances
DefaultMaxRestarts int `yaml:"default_max_restarts" json:"default_max_restarts"`
// Default restart delay for new instances (in seconds)
DefaultRestartDelay int `yaml:"default_restart_delay" json:"default_restart_delay"`
// Default on-demand start setting for new instances
DefaultOnDemandStart bool `yaml:"default_on_demand_start" json:"default_on_demand_start"`
// How long to wait for an instance to start on demand (in seconds)
OnDemandStartTimeout int `yaml:"on_demand_start_timeout,omitempty" json:"on_demand_start_timeout,omitempty"`
// Interval for checking instance timeouts (in minutes)
TimeoutCheckInterval int `yaml:"timeout_check_interval" json:"timeout_check_interval"`
// Logs directory override (relative to data_dir if not absolute)
LogsDir string `yaml:"logs_dir" json:"logs_dir"`
// Log rotation enabled
LogRotationEnabled bool `yaml:"log_rotation_enabled" default:"true"`
// Maximum log file size in MB before rotation
LogRotationMaxSize int `yaml:"log_rotation_max_size" default:"100"`
// Whether to compress rotated log files
LogRotationCompress bool `yaml:"log_rotation_compress" default:"false"`
}
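// Editorial note: unlike the surrounding fields, the three rotation fields
// carry `default:` tags but no json tags; the effective defaults (true, 100,
// false) are set explicitly in getDefaultConfig (pkg/config/defaults.go).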
// AuthConfig contains authentication settings
type AuthConfig struct {
// Require authentication for OpenAI compatible inference endpoints
RequireInferenceAuth bool `yaml:"require_inference_auth" json:"require_inference_auth"`
// List of keys for OpenAI compatible inference endpoints
InferenceKeys []string `yaml:"inference_keys" json:"inference_keys"`
// Require authentication for management endpoints
RequireManagementAuth bool `yaml:"require_management_auth" json:"require_management_auth"`
// List of keys for management endpoints
ManagementKeys []string `yaml:"management_keys" json:"management_keys"`
}
type NodeConfig struct {
Address string `yaml:"address" json:"address"`
APIKey string `yaml:"api_key,omitempty" json:"api_key,omitempty"`
}


@@ -3,10 +3,12 @@ package instance
import (
"encoding/json"
"fmt"
"llamactl/pkg/config"
"log"
"net/http"
"time"
"llamactl/pkg/backends"
"llamactl/pkg/config"
)
// Instance represents a running instance of llama server
@@ -68,7 +70,16 @@ func New(name string, globalConfig *config.AppConfig, opts *Options, onStatusCha
// Only create logger, proxy, and process for local instances
if !instance.IsRemote() {
- instance.logger = newLogger(name, globalInstanceSettings.LogsDir)
+ logRotationConfig := &LogRotationConfig{
+ Enabled: globalInstanceSettings.LogRotationEnabled,
+ MaxSize: globalInstanceSettings.LogRotationMaxSize,
+ Compress: globalInstanceSettings.LogRotationCompress,
+ }
+ instance.logger = newLogger(
+ name,
+ globalInstanceSettings.LogsDir,
+ logRotationConfig,
+ )
instance.process = newProcess(instance)
}
@@ -107,6 +118,14 @@ func (i *Instance) WaitForHealthy(timeout int) error {
return i.process.waitForHealthy(timeout)
}
+ func (i *Instance) GetBackendType() backends.BackendType {
+ opts := i.GetOptions()
+ if opts == nil {
+ return backends.BackendTypeUnknown
+ }
+ return opts.BackendOptions.BackendType
+ }
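// Sketch (editorial, not part of this diff): BackendTypeUnknown lets callers
// guard before options are set, without nil-checking options themselves:
//
//	if inst.GetBackendType() == backends.BackendTypeUnknown {
//		log.Printf("instance has no backend configured yet")
//	}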
// GetOptions returns the current options
func (i *Instance) GetOptions() *Options {
if i.options == nil {


@@ -27,8 +27,8 @@ func TestNewInstance(t *testing.T) {
},
},
Instances: config.InstancesConfig{
- LogsDir: "/tmp/test",
DefaultAutoRestart: true,
+ LogsDir: "/tmp/test",
DefaultMaxRestarts: 3,
DefaultRestartDelay: 5,
},
@@ -120,8 +120,8 @@ func TestSetOptions(t *testing.T) {
},
},
Instances: config.InstancesConfig{
- LogsDir: "/tmp/test",
DefaultAutoRestart: true,
+ LogsDir: "/tmp/test",
DefaultMaxRestarts: 3,
DefaultRestartDelay: 5,
},


@@ -7,66 +7,117 @@ import (
 	"os"
 	"strings"
 	"sync"
-	"sync/atomic"
 	"time"
+
+	timber "github.com/DeRuina/timberjack"
 )
 
+// LogRotationConfig contains log rotation settings for instances
+type LogRotationConfig struct {
+	Enabled  bool
+	MaxSize  int
+	Compress bool
+}
+
 type logger struct {
 	name        string
 	logDir      string
-	logFile     atomic.Pointer[os.File]
+	logFile     *timber.Logger
 	logFilePath string
 	mu          sync.RWMutex
+	cfg         *LogRotationConfig
 }
 
-func newLogger(name string, logDir string) *logger {
+func newLogger(name, logDir string, cfg *LogRotationConfig) *logger {
 	return &logger{
 		name:   name,
 		logDir: logDir,
+		cfg:    cfg,
 	}
 }
 
 // create creates and opens the log files for stdout and stderr
-func (i *logger) create() error {
-	i.mu.Lock()
-	defer i.mu.Unlock()
+func (l *logger) create() error {
+	l.mu.Lock()
+	defer l.mu.Unlock()
 
-	if i.logDir == "" {
-		return fmt.Errorf("logDir is empty for instance %s", i.name)
+	if l.logDir == "" {
+		return fmt.Errorf("logDir empty for instance %s", l.name)
 	}
 
-	// Set up instance logs
-	logPath := i.logDir + "/" + i.name + ".log"
-	i.logFilePath = logPath
-
-	if err := os.MkdirAll(i.logDir, 0755); err != nil {
+	if err := os.MkdirAll(l.logDir, 0755); err != nil {
 		return fmt.Errorf("failed to create log directory: %w", err)
 	}
 
-	logFile, err := os.OpenFile(logPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
-	if err != nil {
-		return fmt.Errorf("failed to create stdout log file: %w", err)
+	logPath := fmt.Sprintf("%s/%s.log", l.logDir, l.name)
+	l.logFilePath = logPath
+
+	// Build the timber logger
+	t := &timber.Logger{
+		Filename:   logPath,
+		MaxSize:    l.cfg.MaxSize,
+		MaxBackups: 0, // No limit on backups
+		// Compression: "gzip" if Compress is true, else "none"
+		Compression: func() string {
+			if l.cfg.Compress {
+				return "gzip"
+			}
+			return "none"
+		}(),
+		FileMode:  0644,
+		LocalTime: true,
 	}
-	i.logFile.Store(logFile)
+
+	// If rotation is disabled, set MaxSize to 0 so no rotation occurs
+	if !l.cfg.Enabled {
+		t.MaxSize = 0
+	}
+
+	l.logFile = t
 
-	// Write a startup marker to both files
-	timestamp := time.Now().Format("2006-01-02 15:04:05")
-	fmt.Fprintf(logFile, "\n=== Instance %s started at %s ===\n", i.name, timestamp)
+	// Write a startup marker
+	ts := time.Now().Format("2006-01-02 15:04:05")
+	fmt.Fprintf(t, "\n=== Instance %s started at %s ===\n", l.name, ts)
 
 	return nil
 }
 
-// getLogs retrieves the last n lines of logs from the instance
-func (i *logger) getLogs(num_lines int) (string, error) {
-	i.mu.RLock()
-	defer i.mu.RUnlock()
+// readOutput reads process output and appends each line to the log file
+func (l *logger) readOutput(rc io.ReadCloser) {
+	defer rc.Close()
+	scanner := bufio.NewScanner(rc)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if lg := l.logFile; lg != nil {
+			fmt.Fprintln(lg, line)
+		}
+	}
+}
 
-	if i.logFilePath == "" {
-		return "", fmt.Errorf("log file not created for instance %s", i.name)
+// close writes a stop marker and closes the log file
+func (l *logger) close() {
+	l.mu.Lock()
+	defer l.mu.Unlock()
+
+	lg := l.logFile
+	if lg == nil {
+		return
 	}
 
-	file, err := os.Open(i.logFilePath)
+	ts := time.Now().Format("2006-01-02 15:04:05")
+	fmt.Fprintf(lg, "=== Instance %s stopped at %s ===\n\n", l.name, ts)
+	_ = lg.Close()
+	l.logFile = nil
+}
+
+// getLogs retrieves the last n lines of logs from the instance
+func (l *logger) getLogs(num_lines int) (string, error) {
+	l.mu.RLock()
+	defer l.mu.RUnlock()
+
+	if l.logFilePath == "" {
+		return "", fmt.Errorf("log file not created for instance %s", l.name)
+	}
+
+	file, err := os.Open(l.logFilePath)
 	if err != nil {
 		return "", fmt.Errorf("failed to open log file: %w", err)
 	}
@@ -97,31 +148,3 @@ func (i *logger) getLogs(num_lines int) (string, error) {
 	return strings.Join(lines[start:], "\n"), nil
 }
-
-// close closes the log files
-func (i *logger) close() {
-	i.mu.Lock()
-	defer i.mu.Unlock()
-
-	logFile := i.logFile.Swap(nil)
-	if logFile != nil {
-		timestamp := time.Now().Format("2006-01-02 15:04:05")
-		fmt.Fprintf(logFile, "=== Instance %s stopped at %s ===\n\n", i.name, timestamp)
-		logFile.Sync() // Ensure all buffered data is written to disk
-		logFile.Close()
-	}
-}
-
-// readOutput reads from the given reader and writes lines to the log file
-func (i *logger) readOutput(reader io.ReadCloser) {
-	defer reader.Close()
-	scanner := bufio.NewScanner(reader)
-	for scanner.Scan() {
-		line := scanner.Text()
-		// Use atomic load to avoid lock contention on every line
-		if logFile := i.logFile.Load(); logFile != nil {
-			fmt.Fprintln(logFile, line)
-		}
-	}
-}

View File

@@ -19,7 +19,7 @@ type InstanceManager interface {
 	UpdateInstance(name string, options *instance.Options) (*instance.Instance, error)
 	DeleteInstance(name string) error
 	StartInstance(name string) (*instance.Instance, error)
-	IsMaxRunningInstancesReached() bool
+	AtMaxRunning() bool
 	StopInstance(name string) (*instance.Instance, error)
 	EvictLRUInstance() error
 	RestartInstance(name string) (*instance.Instance, error)

View File

@@ -203,11 +203,11 @@ func createTestAppConfig(instancesDir string) *config.AppConfig {
 		Instances: config.InstancesConfig{
 			PortRange:            [2]int{8000, 9000},
 			InstancesDir:         instancesDir,
-			LogsDir:              instancesDir,
 			MaxInstances:         10,
 			MaxRunningInstances:  10,
 			DefaultAutoRestart:   true,
 			DefaultMaxRestarts:   3,
+			LogsDir:              instancesDir,
 			DefaultRestartDelay:  5,
 			TimeoutCheckInterval: 5,
 		},

View File

@@ -383,7 +383,7 @@ func (im *instanceManager) StartInstance(name string) (*instance.Instance, error
 	}
 
 	// Check max running instances limit for local instances only
-	if im.IsMaxRunningInstancesReached() {
+	if im.AtMaxRunning() {
 		return nil, MaxRunningInstancesError(fmt.Errorf("maximum number of running instances (%d) reached", im.globalConfig.Instances.MaxRunningInstances))
 	}
@@ -399,7 +399,7 @@ func (im *instanceManager) StartInstance(name string) (*instance.Instance, error
 	return inst, nil
 }
 
-func (im *instanceManager) IsMaxRunningInstancesReached() bool {
+func (im *instanceManager) AtMaxRunning() bool {
 	if im.globalConfig.Instances.MaxRunningInstances == -1 {
 		return false
 	}

View File

@@ -96,7 +96,7 @@ func (h *Handler) ensureInstanceRunning(inst *instance.Instance) error {
 		return fmt.Errorf("instance is not running and on-demand start is not enabled")
 	}
 
-	if h.InstanceManager.IsMaxRunningInstancesReached() {
+	if h.InstanceManager.AtMaxRunning() {
 		if h.cfg.Instances.EnableLRUEviction {
 			err := h.InstanceManager.EvictLRUInstance()
 			if err != nil {

View File

@@ -306,3 +306,158 @@ func (h *Handler) LlamaServerVersionHandler() http.HandlerFunc {
func (h *Handler) LlamaServerListDevicesHandler() http.HandlerFunc {
return h.executeLlamaServerCommand("--list-devices", "Failed to list devices")
}
// LlamaCppListModels godoc
// @Summary List models in a llama.cpp instance
// @Description Returns a list of models available in the specified llama.cpp instance
// @Tags Llama.cpp
// @Security ApiKeyAuth
// @Produce json
// @Param name path string true "Instance Name"
// @Success 200 {object} map[string]any "Models list response"
// @Failure 400 {string} string "Invalid instance"
// @Failure 500 {string} string "Internal Server Error"
// @Router /api/v1/llama-cpp/{name}/models [get]
func (h *Handler) LlamaCppListModels() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
inst, err := h.validateLlamaCppInstance(r)
if err != nil {
writeError(w, http.StatusBadRequest, "invalid instance", err.Error())
return
}
// Check instance permissions
if err := h.authMiddleware.CheckInstancePermission(r.Context(), inst.ID); err != nil {
writeError(w, http.StatusForbidden, "permission_denied", err.Error())
return
}
// Check if instance is shutting down before autostart logic
if inst.GetStatus() == instance.ShuttingDown {
writeError(w, http.StatusServiceUnavailable, "instance_shutting_down", "Instance is shutting down")
return
}
if !inst.IsRemote() && !inst.IsRunning() {
err := h.ensureInstanceRunning(inst)
if err != nil {
writeError(w, http.StatusInternalServerError, "instance start failed", err.Error())
return
}
}
// Modify request path to /models for proxying
r.URL.Path = "/models"
// Use instance's ServeHTTP which tracks inflight requests and handles shutting down state
err = inst.ServeHTTP(w, r)
if err != nil {
// Error is already handled in ServeHTTP (response written)
return
}
}
}
// LlamaCppLoadModel godoc
// @Summary Load a model in a llama.cpp instance
// @Description Loads the specified model in the given llama.cpp instance
// @Tags Llama.cpp
// @Security ApiKeyAuth
// @Produce json
// @Param name path string true "Instance Name"
// @Param model path string true "Model Name"
// @Success 200 {object} map[string]string "Success message"
// @Failure 400 {string} string "Invalid request"
// @Failure 500 {string} string "Internal Server Error"
// @Router /api/v1/llama-cpp/{name}/models/{model}/load [post]
func (h *Handler) LlamaCppLoadModel() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
inst, err := h.validateLlamaCppInstance(r)
if err != nil {
writeError(w, http.StatusBadRequest, "invalid instance", err.Error())
return
}
// Check instance permissions
if err := h.authMiddleware.CheckInstancePermission(r.Context(), inst.ID); err != nil {
writeError(w, http.StatusForbidden, "permission_denied", err.Error())
return
}
// Check if instance is shutting down before autostart logic
if inst.GetStatus() == instance.ShuttingDown {
writeError(w, http.StatusServiceUnavailable, "instance_shutting_down", "Instance is shutting down")
return
}
if !inst.IsRemote() && !inst.IsRunning() {
err := h.ensureInstanceRunning(inst)
if err != nil {
writeError(w, http.StatusInternalServerError, "instance start failed", err.Error())
return
}
}
// Modify request path to /models/load for proxying
r.URL.Path = "/models/load"
// Use instance's ServeHTTP which tracks inflight requests and handles shutting down state
err = inst.ServeHTTP(w, r)
if err != nil {
// Error is already handled in ServeHTTP (response written)
return
}
}
}
// LlamaCppUnloadModel godoc
// @Summary Unload a model in a llama.cpp instance
// @Description Unloads the specified model in the given llama.cpp instance
// @Tags Llama.cpp
// @Security ApiKeyAuth
// @Produce json
// @Param name path string true "Instance Name"
// @Param model path string true "Model Name"
// @Success 200 {object} map[string]string "Success message"
// @Failure 400 {string} string "Invalid request"
// @Failure 500 {string} string "Internal Server Error"
// @Router /api/v1/llama-cpp/{name}/models/{model}/unload [post]
func (h *Handler) LlamaCppUnloadModel() http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
inst, err := h.validateLlamaCppInstance(r)
if err != nil {
writeError(w, http.StatusBadRequest, "invalid instance", err.Error())
return
}
// Check instance permissions
if err := h.authMiddleware.CheckInstancePermission(r.Context(), inst.ID); err != nil {
writeError(w, http.StatusForbidden, "permission_denied", err.Error())
return
}
// Check if instance is shutting down before autostart logic
if inst.GetStatus() == instance.ShuttingDown {
writeError(w, http.StatusServiceUnavailable, "instance_shutting_down", "Instance is shutting down")
return
}
if !inst.IsRemote() && !inst.IsRunning() {
err := h.ensureInstanceRunning(inst)
if err != nil {
writeError(w, http.StatusInternalServerError, "instance start failed", err.Error())
return
}
}
// Modify request path to /models/unload for proxying
r.URL.Path = "/models/unload"
// Use instance's ServeHTTP which tracks inflight requests and handles shutting down state
err = inst.ServeHTTP(w, r)
if err != nil {
// Error is already handled in ServeHTTP (response written)
return
}
}
}
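Together with the routes registered later in the router setup, these three handlers give each llama.cpp instance a small model-management API. A minimal Python sketch of a client, assuming a llamactl server on localhost:8080, a running instance named router, a model id qwen3-4b, and bearer-key management auth (all of these names are hypothetical); like the web UI client, it sends no request body to load/unload:

import requests

BASE = "http://localhost:8080/api/v1"
HEADERS = {"Authorization": "Bearer my-management-key"}

# List the models available in the "router" instance
resp = requests.get(f"{BASE}/llama-cpp/router/models", headers=HEADERS)
resp.raise_for_status()
for m in resp.json().get("data", []):
    print(m["id"], m["status"]["value"])

# Load, then unload, a model by name
requests.post(f"{BASE}/llama-cpp/router/models/qwen3-4b/load", headers=HEADERS)
requests.post(f"{BASE}/llama-cpp/router/models/qwen3-4b/unload", headers=HEADERS)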

View File

@@ -3,10 +3,13 @@ package server
 import (
 	"bytes"
 	"encoding/json"
+	"fmt"
 	"io"
+	"llamactl/pkg/backends"
 	"llamactl/pkg/instance"
 	"llamactl/pkg/validation"
 	"net/http"
+	"strings"
 )
// OpenAIListInstancesResponse represents the response structure for listing instances (models) in OpenAI-compatible format
@@ -23,6 +26,53 @@ type OpenAIInstance struct {
OwnedBy string `json:"owned_by"`
}
// LlamaCppModel represents a model available in a llama.cpp instance
type LlamaCppModel struct {
ID string `json:"id"`
Object string `json:"object"`
OwnedBy string `json:"owned_by"`
Created int64 `json:"created"`
InCache bool `json:"in_cache"`
Path string `json:"path"`
Status LlamaCppModelStatus `json:"status"`
}
// LlamaCppModelStatus represents the status of a model in a llama.cpp instance
type LlamaCppModelStatus struct {
Value string `json:"value"` // "loaded" | "loading" | "unloaded"
Args []string `json:"args"`
}
// fetchLlamaCppModels fetches models from a llama.cpp instance by querying its /models endpoint directly
func fetchLlamaCppModels(inst *instance.Instance) ([]LlamaCppModel, error) {
// Create a request to the instance's /models endpoint
req, err := http.NewRequest("GET", fmt.Sprintf("http://%s:%d/models", inst.GetHost(), inst.GetPort()), nil)
if err != nil {
return nil, fmt.Errorf("failed to create request: %w", err)
}
// Send the request directly and read the response
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
bodyBytes, _ := io.ReadAll(resp.Body)
return nil, fmt.Errorf("status %d: %s", resp.StatusCode, string(bodyBytes))
}
var result struct {
Data []LlamaCppModel `json:"data"`
}
if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
return nil, fmt.Errorf("failed to decode response: %w", err)
}
return result.Data, nil
}
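For reference, this helper expects the instance's /models endpoint to answer with a payload shaped like the structs above. An illustrative example (values are hypothetical, not captured from a real server):

{
  "data": [
    {
      "id": "qwen3-4b",
      "object": "model",
      "owned_by": "llama.cpp",
      "created": 1734800000,
      "in_cache": true,
      "path": "/models/qwen3-4b.gguf",
      "status": { "value": "loaded", "args": [] }
    }
  ]
}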
// OpenAIListInstances godoc
// @Summary List instances in OpenAI-compatible format
// @Description Returns a list of instances in a format compatible with OpenAI API
@@ -40,14 +90,41 @@ func (h *Handler) OpenAIListInstances() http.HandlerFunc {
 		return
 	}
 
-	openaiInstances := make([]OpenAIInstance, len(instances))
-	for i, inst := range instances {
-		openaiInstances[i] = OpenAIInstance{
+	var openaiInstances []OpenAIInstance
+
+	// For each llama.cpp instance, try to fetch models and add them as separate entries
+	for _, inst := range instances {
+		if inst.GetBackendType() == backends.BackendTypeLlamaCpp && inst.IsRunning() {
+			// Try to fetch models from the instance; on failure, fall through to the
+			// single-entry fallback below instead of dropping the instance entirely
+			models, err := fetchLlamaCppModels(inst)
+			if err != nil {
+				fmt.Printf("Failed to fetch models from instance %s: %v\n", inst.Name, err)
+			}
+
+			for _, model := range models {
+				openaiInstances = append(openaiInstances, OpenAIInstance{
+					ID:      inst.Name + "/" + model.ID,
+					Object:  "model",
+					Created: inst.Created,
+					OwnedBy: inst.Name,
+				})
+			}
+
+			if len(models) > 1 {
+				// Skip adding the instance name if multiple models are present
+				continue
+			}
+		}
+
+		// Add instance name as single entry (for non-llama.cpp or if model fetch failed)
+		openaiInstances = append(openaiInstances, OpenAIInstance{
 			ID:      inst.Name,
 			Object:  "model",
 			Created: inst.Created,
 			OwnedBy: "llamactl",
-		}
+		})
 	}
openaiResponse := OpenAIListInstancesResponse{
@@ -87,14 +164,28 @@ func (h *Handler) OpenAIProxy() http.HandlerFunc {
 			return
 		}
 
-		modelName, ok := requestBody["model"].(string)
-		if !ok || modelName == "" {
-			writeError(w, http.StatusBadRequest, "invalid_request", "Instance name is required")
+		reqModelName, ok := requestBody["model"].(string)
+		if !ok || reqModelName == "" {
+			writeError(w, http.StatusBadRequest, "invalid_request", "Model name is required")
 			return
 		}
 
+		// Parse instance name and model name from <instance_name>/<model_name> format
+		var instanceName string
+		var modelName string
+
+		// Check if model name contains "/"
+		if idx := strings.Index(reqModelName, "/"); idx != -1 {
+			// Split into instance and model parts
+			instanceName = reqModelName[:idx]
+			modelName = reqModelName[idx+1:]
+		} else {
+			instanceName = reqModelName
+			modelName = reqModelName
+		}
+
 		// Validate instance name at the entry point
-		validatedName, err := validation.ValidateInstanceName(modelName)
+		validatedName, err := validation.ValidateInstanceName(instanceName)
 		if err != nil {
 			writeError(w, http.StatusBadRequest, "invalid_instance_name", err.Error())
 			return
@@ -119,6 +210,11 @@ func (h *Handler) OpenAIProxy() http.HandlerFunc {
return
}
if inst.IsRemote() {
// Don't replace model name for remote instances
modelName = reqModelName
}
if !inst.IsRemote() && !inst.IsRunning() {
err := h.ensureInstanceRunning(inst)
if err != nil {
@@ -127,6 +223,16 @@ func (h *Handler) OpenAIProxy() http.HandlerFunc {
}
}
// Update the request body with just the model name
requestBody["model"] = modelName
// Re-marshal the updated body
bodyBytes, err = json.Marshal(requestBody)
if err != nil {
writeError(w, http.StatusInternalServerError, "marshal_error", "Failed to update request body")
return
}
// Recreate the request body from the bytes we read
r.Body = io.NopCloser(bytes.NewReader(bodyBytes))
r.ContentLength = int64(len(bodyBytes))
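The practical effect of the parsing and body rewrite above: a client can address a specific model inside a multi-model llama.cpp instance by prefixing it with the instance name, and the proxy strips the prefix so the local backend sees only the bare model name. A Python sketch with hypothetical names (instance router, model qwen3-4b, inference key test-inf):

import requests

resp = requests.post(
    "http://localhost:8080/v1/chat/completions",
    headers={"Authorization": "Bearer test-inf"},
    json={
        # Routed to instance "router"; the proxied body carries "model": "qwen3-4b"
        "model": "router/qwen3-4b",
        "messages": [{"role": "user", "content": "Hello"}],
    },
    timeout=60,
)
print(resp.json()["choices"][0]["message"]["content"])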

View File

@@ -73,6 +73,13 @@ func SetupRouter(handler *Handler) *chi.Mux {
})
})
// Llama.cpp instance-specific endpoints
r.Route("/llama-cpp/{name}", func(r chi.Router) {
r.Get("/models", handler.LlamaCppListModels())
r.Post("/models/{model}/load", handler.LlamaCppLoadModel())
r.Post("/models/{model}/unload", handler.LlamaCppUnloadModel())
})
// Node management endpoints
r.Route("/nodes", func(r chi.Router) {
r.Get("/", handler.ListNodes()) // List all nodes

test_llm.py Normal file (74 lines)
View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
"""
Simple Python script to interact with a local LLM server's OpenAI-compatible API
"""
import requests

# Local LLM server configuration
LLM_SERVER_URL = "http://localhost:8080/v1/chat/completions"
MODEL_NAME = "proxy-test"  # Default model name; change to match your setup


def send_message(message, model=MODEL_NAME, temperature=0.7, max_tokens=1000):
    """
    Send a message to the local LLM server API

    Args:
        message (str): The message to send
        model (str): Model name (depends on your LLM server setup)
        temperature (float): Controls randomness (0.0 to 1.0)
        max_tokens (int): Maximum tokens in response

    Returns:
        str: The AI response or error message
    """
    headers = {
        "Content-Type": "application/json",
        "Authorization": "Bearer test-inf"
    }
    data = {
        "model": model,
        "messages": [
            {
                "role": "user",
                "content": message
            }
        ],
        "temperature": temperature,
        "max_tokens": max_tokens,
        "stream": False
    }
    try:
        response = requests.post(LLM_SERVER_URL, headers=headers, json=data, timeout=60)
        response.raise_for_status()
        result = response.json()
        return result["choices"][0]["message"]["content"]
    except requests.RequestException as e:
        return f"Error: {e}"


def main():
    """Run in interactive mode for continuous conversation"""
    print("Local LLM Chat Client")
    print("-" * 40)

    while True:
        try:
            user_input = input("\nYou: ").strip()
            if not user_input:
                continue
            print("AI: ", end="", flush=True)
            response = send_message(user_input)
            print(response)
        except KeyboardInterrupt:
            print("\nGoodbye!")
            break
        except EOFError:
            print("\nGoodbye!")
            break


if __name__ == "__main__":
    main()

webui/package-lock.json generated (244 lines)
View File

@@ -18,7 +18,7 @@
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"date-fns": "^4.1.0",
"lucide-react": "^0.555.0",
"lucide-react": "^0.560.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"sonner": "^2.0.7",
@@ -40,18 +40,18 @@
"eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.20",
"jsdom": "^27.2.0",
"jsdom": "^27.3.0",
"tw-animate-css": "^1.4.0",
"typescript": "^5.9.3",
"typescript-eslint": "^8.48.0",
"typescript-eslint": "^8.49.0",
"vite": "^7.2.2",
"vitest": "^4.0.8"
}
},
"node_modules/@acemir/cssom": {
"version": "0.9.23",
"resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.23.tgz",
"integrity": "sha512-2kJ1HxBKzPLbmhZpxBiTZggjtgCwKg1ma5RHShxvd6zgqhDEdEkzpiwe7jLkI2p2BrZvFCXIihdoMkl1H39VnA==",
"version": "0.9.28",
"resolved": "https://registry.npmjs.org/@acemir/cssom/-/cssom-0.9.28.tgz",
"integrity": "sha512-LuS6IVEivI75vKN8S04qRD+YySP0RmU/cV8UNukhQZvprxF+76Z43TNo/a08eCodaGhT1Us8etqS1ZRY9/Or0A==",
"dev": true,
"license": "MIT"
},
@@ -76,9 +76,9 @@
}
},
"node_modules/@asamuzakjp/css-color": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.5.tgz",
"integrity": "sha512-lMrXidNhPGsDjytDy11Vwlb6OIGrT3CmLg3VWNFyWkLWtijKl7xjvForlh8vuj0SHGjgl4qZEQzUmYTeQA2JFQ==",
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.1.0.tgz",
"integrity": "sha512-9xiBAtLn4aNsa4mDnpovJvBn72tNEIACyvlqaNJ+ADemR+yeMJWnBudOi2qGDviJa7SwcDOU/TRh5dnET7qk0w==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -86,23 +86,23 @@
"@csstools/css-color-parser": "^3.1.0",
"@csstools/css-parser-algorithms": "^3.0.5",
"@csstools/css-tokenizer": "^3.0.4",
"lru-cache": "^11.2.1"
"lru-cache": "^11.2.2"
}
},
"node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
"version": "11.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz",
"integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==",
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "ISC",
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@asamuzakjp/dom-selector": {
"version": "6.7.4",
"resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.4.tgz",
"integrity": "sha512-buQDjkm+wDPXd6c13534URWZqbz0RP5PAhXZ+LIoa5LgwInT9HVJvGIJivg75vi8I13CxDGdTnz+aY5YUJlIAA==",
"version": "6.7.6",
"resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.7.6.tgz",
"integrity": "sha512-hBaJER6A9MpdG3WgdlOolHmbOYvSk46y7IQN/1+iqiCuUu6iWdQrs9DGKF8ocqsEqWujWf/V7b7vaDgiUmIvUg==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -110,15 +110,15 @@
"bidi-js": "^1.0.3",
"css-tree": "^3.1.0",
"is-potential-custom-element-name": "^1.0.1",
"lru-cache": "^11.2.2"
"lru-cache": "^11.2.4"
}
},
"node_modules/@asamuzakjp/dom-selector/node_modules/lru-cache": {
"version": "11.2.2",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.2.tgz",
"integrity": "sha512-F9ODfyqML2coTIsQpSkRHnLSZMtkU8Q+mSfcaIyKwy58u+8k5nvAYeiNhsyMARvzNcXJ9QfWVrcPsC9e9rAxtg==",
"version": "11.2.4",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz",
"integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==",
"dev": true,
"license": "ISC",
"license": "BlueOak-1.0.0",
"engines": {
"node": "20 || >=22"
}
@@ -161,6 +161,7 @@
"integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@babel/code-frame": "^7.27.1",
"@babel/generator": "^7.28.5",
@@ -510,6 +511,7 @@
}
],
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
},
@@ -518,9 +520,9 @@
}
},
"node_modules/@csstools/css-syntax-patches-for-csstree": {
"version": "1.0.16",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.16.tgz",
"integrity": "sha512-2SpS4/UaWQaGpBINyG5ZuCHnUDeVByOhvbkARwfmnfxDvTaj80yOI1cD8Tw93ICV5Fx4fnyDKWQZI1CDtcWyUg==",
"version": "1.0.14",
"resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz",
"integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==",
"dev": true,
"funding": [
{
@@ -535,6 +537,9 @@
"license": "MIT-0",
"engines": {
"node": ">=18"
},
"peerDependencies": {
"postcss": "^8.4"
}
},
"node_modules/@csstools/css-tokenizer": {
@@ -553,6 +558,7 @@
}
],
"license": "MIT",
"peer": true,
"engines": {
"node": ">=18"
}
@@ -2531,8 +2537,7 @@
"resolved": "https://registry.npmjs.org/@types/aria-query/-/aria-query-5.0.4.tgz",
"integrity": "sha512-rfT93uj5s0PRL7EzccGMs3brplhcrghnDoV26NqKhCAS1hVo+WdNsPvE/yb6ilfr5hi2MEk6d5EWJTKdxg8jVw==",
"dev": true,
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/@types/babel__core": {
"version": "7.20.5",
@@ -2616,6 +2621,7 @@
"integrity": "sha512-GNWcUTRBgIRJD5zj+Tq0fKOJ5XZajIiBroOF0yvj2bSU1WvNdYS/dn9UxwsujGW4JX06dnHyjV2y9rRaybH0iQ==",
"devOptional": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~7.16.0"
}
@@ -2626,6 +2632,7 @@
"integrity": "sha512-tBFxBp9Nfyy5rsmefN+WXc1JeW/j2BpBHFdLZbEVfs9wn3E3NRFxwV0pJg8M1qQAexFpvz73hJXFofV0ZAu92A==",
"devOptional": true,
"license": "MIT",
"peer": true,
"dependencies": {
"csstype": "^3.0.2"
}
@@ -2636,23 +2643,23 @@
"integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==",
"devOptional": true,
"license": "MIT",
"peer": true,
"peerDependencies": {
"@types/react": "^19.2.0"
}
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.48.0.tgz",
"integrity": "sha512-XxXP5tL1txl13YFtrECECQYeZjBZad4fyd3cFV4a19LkAY/bIp9fev3US4S5fDVV2JaYFiKAZ/GRTOLer+mbyQ==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.49.0.tgz",
"integrity": "sha512-JXij0vzIaTtCwu6SxTh8qBc66kmf1xs7pI4UOiMDFVct6q86G0Zs7KRcEoJgY3Cav3x5Tq0MF5jwgpgLqgKG3A==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
"@typescript-eslint/scope-manager": "8.48.0",
"@typescript-eslint/type-utils": "8.48.0",
"@typescript-eslint/utils": "8.48.0",
"@typescript-eslint/visitor-keys": "8.48.0",
"graphemer": "^1.4.0",
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/type-utils": "8.49.0",
"@typescript-eslint/utils": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"ignore": "^7.0.0",
"natural-compare": "^1.4.0",
"ts-api-utils": "^2.1.0"
@@ -2665,7 +2672,7 @@
"url": "https://opencollective.com/typescript-eslint"
},
"peerDependencies": {
"@typescript-eslint/parser": "^8.48.0",
"@typescript-eslint/parser": "^8.49.0",
"eslint": "^8.57.0 || ^9.0.0",
"typescript": ">=4.8.4 <6.0.0"
}
@@ -2681,16 +2688,17 @@
}
},
"node_modules/@typescript-eslint/parser": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.48.0.tgz",
"integrity": "sha512-jCzKdm/QK0Kg4V4IK/oMlRZlY+QOcdjv89U2NgKHZk1CYTj82/RVSx1mV/0gqCVMJ/DA+Zf/S4NBWNF8GQ+eqQ==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.49.0.tgz",
"integrity": "sha512-N9lBGA9o9aqb1hVMc9hzySbhKibHmB+N3IpoShyV6HyQYRGIhlrO5rQgttypi+yEeKsKI4idxC8Jw6gXKD4THA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@typescript-eslint/scope-manager": "8.48.0",
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/typescript-estree": "8.48.0",
"@typescript-eslint/visitor-keys": "8.48.0",
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2706,14 +2714,14 @@
}
},
"node_modules/@typescript-eslint/project-service": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.48.0.tgz",
"integrity": "sha512-Ne4CTZyRh1BecBf84siv42wv5vQvVmgtk8AuiEffKTUo3DrBaGYZueJSxxBZ8fjk/N3DrgChH4TOdIOwOwiqqw==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.49.0.tgz",
"integrity": "sha512-/wJN0/DKkmRUMXjZUXYZpD1NEQzQAAn9QWfGwo+Ai8gnzqH7tvqS7oNVdTjKqOcPyVIdZdyCMoqN66Ia789e7g==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/tsconfig-utils": "^8.48.0",
"@typescript-eslint/types": "^8.48.0",
"@typescript-eslint/tsconfig-utils": "^8.49.0",
"@typescript-eslint/types": "^8.49.0",
"debug": "^4.3.4"
},
"engines": {
@@ -2728,14 +2736,14 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.48.0.tgz",
"integrity": "sha512-uGSSsbrtJrLduti0Q1Q9+BF1/iFKaxGoQwjWOIVNJv0o6omrdyR8ct37m4xIl5Zzpkp69Kkmvom7QFTtue89YQ==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.49.0.tgz",
"integrity": "sha512-npgS3zi+/30KSOkXNs0LQXtsg9ekZ8OISAOLGWA/ZOEn0ZH74Ginfl7foziV8DT+D98WfQ5Kopwqb/PZOaIJGg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/visitor-keys": "8.48.0"
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2746,9 +2754,9 @@
}
},
"node_modules/@typescript-eslint/tsconfig-utils": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.48.0.tgz",
"integrity": "sha512-WNebjBdFdyu10sR1M4OXTt2OkMd5KWIL+LLfeH9KhgP+jzfDV/LI3eXzwJ1s9+Yc0Kzo2fQCdY/OpdusCMmh6w==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.49.0.tgz",
"integrity": "sha512-8prixNi1/6nawsRYxet4YOhnbW+W9FK/bQPxsGB1D3ZrDzbJ5FXw5XmzxZv82X3B+ZccuSxo/X8q9nQ+mFecWA==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2763,15 +2771,15 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.48.0.tgz",
"integrity": "sha512-zbeVaVqeXhhab6QNEKfK96Xyc7UQuoFWERhEnj3mLVnUWrQnv15cJNseUni7f3g557gm0e46LZ6IJ4NJVOgOpw==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.49.0.tgz",
"integrity": "sha512-KTExJfQ+svY8I10P4HdxKzWsvtVnsuCifU5MvXrRwoP2KOlNZ9ADNEWWsQTJgMxLzS5VLQKDjkCT/YzgsnqmZg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/typescript-estree": "8.48.0",
"@typescript-eslint/utils": "8.48.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/utils": "8.49.0",
"debug": "^4.3.4",
"ts-api-utils": "^2.1.0"
},
@@ -2788,9 +2796,9 @@
}
},
"node_modules/@typescript-eslint/types": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.48.0.tgz",
"integrity": "sha512-cQMcGQQH7kwKoVswD1xdOytxQR60MWKM1di26xSUtxehaDs/32Zpqsu5WJlXTtTTqyAVK8R7hvsUnIXRS+bjvA==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.49.0.tgz",
"integrity": "sha512-e9k/fneezorUo6WShlQpMxXh8/8wfyc+biu6tnAqA81oWrEic0k21RHzP9uqqpyBBeBKu4T+Bsjy9/b8u7obXQ==",
"dev": true,
"license": "MIT",
"engines": {
@@ -2802,16 +2810,16 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.48.0.tgz",
"integrity": "sha512-ljHab1CSO4rGrQIAyizUS6UGHHCiAYhbfcIZ1zVJr5nMryxlXMVWS3duFPSKvSUbFPwkXMFk1k0EMIjub4sRRQ==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.49.0.tgz",
"integrity": "sha512-jrLdRuAbPfPIdYNppHJ/D0wN+wwNfJ32YTAm10eJVsFmrVpXQnDWBn8niCSMlWjvml8jsce5E/O+86IQtTbJWA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/project-service": "8.48.0",
"@typescript-eslint/tsconfig-utils": "8.48.0",
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/visitor-keys": "8.48.0",
"@typescript-eslint/project-service": "8.49.0",
"@typescript-eslint/tsconfig-utils": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/visitor-keys": "8.49.0",
"debug": "^4.3.4",
"minimatch": "^9.0.4",
"semver": "^7.6.0",
@@ -2869,16 +2877,16 @@
}
},
"node_modules/@typescript-eslint/utils": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.48.0.tgz",
"integrity": "sha512-yTJO1XuGxCsSfIVt1+1UrLHtue8xz16V8apzPYI06W0HbEbEWHxHXgZaAgavIkoh+GeV6hKKd5jm0sS6OYxWXQ==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.49.0.tgz",
"integrity": "sha512-N3W7rJw7Rw+z1tRsHZbK395TWSYvufBXumYtEGzypgMUthlg0/hmCImeA8hgO2d2G4pd7ftpxxul2J8OdtdaFA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.7.0",
"@typescript-eslint/scope-manager": "8.48.0",
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/typescript-estree": "8.48.0"
"@typescript-eslint/scope-manager": "8.49.0",
"@typescript-eslint/types": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2893,13 +2901,13 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.48.0.tgz",
"integrity": "sha512-T0XJMaRPOH3+LBbAfzR2jalckP1MSG/L9eUtY0DEzUyVaXJ/t6zN0nR7co5kz0Jko/nkSYCBRkz1djvjajVTTg==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.49.0.tgz",
"integrity": "sha512-LlKaciDe3GmZFphXIc79THF/YYBugZ7FS1pO581E/edlVVNbZKDy93evqmrfQ9/Y4uN0vVhX4iuchq26mK/iiA==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/types": "8.48.0",
"@typescript-eslint/types": "8.49.0",
"eslint-visitor-keys": "^4.2.1"
},
"engines": {
@@ -3034,6 +3042,7 @@
"integrity": "sha512-F9jI5rSstNknPlTlPN2gcc4gpbaagowuRzw/OJzl368dvPun668Q182S8Q8P9PITgGCl5LAKXpzuue106eM4wA==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@vitest/utils": "4.0.8",
"fflate": "^0.8.2",
@@ -3070,6 +3079,7 @@
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -3120,7 +3130,6 @@
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=8"
}
@@ -3392,6 +3401,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"caniuse-lite": "^1.0.30001726",
"electron-to-chromium": "^1.5.173",
@@ -3614,14 +3624,14 @@
"license": "MIT"
},
"node_modules/cssstyle": {
"version": "5.3.3",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.3.tgz",
"integrity": "sha512-OytmFH+13/QXONJcC75QNdMtKpceNk3u8ThBjyyYjkEcy/ekBwR1mMAuNvi3gdBPW3N5TlCzQ0WZw8H0lN/bDw==",
"version": "5.3.4",
"resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.4.tgz",
"integrity": "sha512-KyOS/kJMEq5O9GdPnaf82noigg5X5DYn0kZPJTaAsCUaBizp6Xa1y9D4Qoqf/JazEXWuruErHgVXwjN5391ZJw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@asamuzakjp/css-color": "^4.0.3",
"@csstools/css-syntax-patches-for-csstree": "^1.0.14",
"@asamuzakjp/css-color": "^4.1.0",
"@csstools/css-syntax-patches-for-csstree": "1.0.14",
"css-tree": "^3.1.0"
},
"engines": {
@@ -3824,8 +3834,7 @@
"resolved": "https://registry.npmjs.org/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz",
"integrity": "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg==",
"dev": true,
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/dunder-proto": {
"version": "1.0.1",
@@ -4129,6 +4138,7 @@
"integrity": "sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.8.0",
"@eslint-community/regexpp": "^4.12.1",
@@ -4665,13 +4675,6 @@
"integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
"license": "ISC"
},
"node_modules/graphemer": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
"dev": true,
"license": "MIT"
},
"node_modules/has-bigints": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz",
@@ -5349,15 +5352,16 @@
}
},
"node_modules/jsdom": {
"version": "27.2.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.2.0.tgz",
"integrity": "sha512-454TI39PeRDW1LgpyLPyURtB4Zx1tklSr6+OFOipsxGUH1WMTvk6C65JQdrj455+DP2uJ1+veBEHTGFKWVLFoA==",
"version": "27.3.0",
"resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.3.0.tgz",
"integrity": "sha512-GtldT42B8+jefDUC4yUKAvsaOrH7PDHmZxZXNgF2xMmymjUbRYJvpAybZAKEmXDGTM0mCsz8duOa4vTm5AY2Kg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@acemir/cssom": "^0.9.23",
"@asamuzakjp/dom-selector": "^6.7.4",
"cssstyle": "^5.3.3",
"@acemir/cssom": "^0.9.28",
"@asamuzakjp/dom-selector": "^6.7.6",
"cssstyle": "^5.3.4",
"data-urls": "^6.0.0",
"decimal.js": "^10.6.0",
"html-encoding-sniffer": "^4.0.0",
@@ -5750,9 +5754,9 @@
}
},
"node_modules/lucide-react": {
"version": "0.555.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.555.0.tgz",
"integrity": "sha512-D8FvHUGbxWBRQM90NZeIyhAvkFfsh3u9ekrMvJ30Z6gnpBHS6HC6ldLg7tL45hwiIz/u66eKDtdA23gwwGsAHA==",
"version": "0.560.0",
"resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.560.0.tgz",
"integrity": "sha512-NwKoUA/aBShsdL8WE5lukV2F/tjHzQRlonQs7fkNGI1sCT0Ay4a9Ap3ST2clUUkcY+9eQ0pBe2hybTQd2fmyDA==",
"license": "ISC",
"peerDependencies": {
"react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0"
@@ -5764,7 +5768,6 @@
"integrity": "sha512-h5bgJWpxJNswbU7qCrV0tIKQCaS3blPDrqKWx+QxzuzL1zGUzij9XCWLrSLsJPu5t+eWA/ycetzYAO5IOMcWAQ==",
"dev": true,
"license": "MIT",
"peer": true,
"bin": {
"lz-string": "bin/bin.js"
}
@@ -6150,6 +6153,7 @@
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=12"
},
@@ -6186,6 +6190,7 @@
}
],
"license": "MIT",
"peer": true,
"dependencies": {
"nanoid": "^3.3.11",
"picocolors": "^1.1.1",
@@ -6211,7 +6216,6 @@
"integrity": "sha512-Qb1gy5OrP5+zDf2Bvnzdl3jsTf1qXVMazbvCoKhtKqVs4/YK4ozX4gKQJJVyNe+cajNPn0KoC0MC3FUmaHWEmQ==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"ansi-regex": "^5.0.1",
"ansi-styles": "^5.0.0",
@@ -6227,7 +6231,6 @@
"integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==",
"dev": true,
"license": "MIT",
"peer": true,
"engines": {
"node": ">=10"
},
@@ -6269,6 +6272,7 @@
"resolved": "https://registry.npmjs.org/react/-/react-19.2.0.tgz",
"integrity": "sha512-tmbWg6W31tQLeB5cdIBOicJDJRR2KzXsV7uSK9iNfLWQ5bIZfxuPEHp7M8wiHyHnn0DD1i7w3Zmin0FtkrwoCQ==",
"license": "MIT",
"peer": true,
"engines": {
"node": ">=0.10.0"
}
@@ -6278,6 +6282,7 @@
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.0.tgz",
"integrity": "sha512-UlbRu4cAiGaIewkPyiRGJk0imDN2T3JjieT6spoL2UeSf5od4n5LB/mQ4ejmxhCFT1tYe8IvaFulzynWovsEFQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"scheduler": "^0.27.0"
},
@@ -6290,8 +6295,7 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-17.0.2.tgz",
"integrity": "sha512-w2GsyukL62IJnlaff/nRegPQR94C/XXamvMWmSHRJ4y7Ts/4ocGRmTHvOs8PSE6pB3dWOrD/nueuU5sduBsQ4w==",
"dev": true,
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/react-refresh": {
"version": "0.18.0",
@@ -7245,6 +7249,7 @@
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
@@ -7254,16 +7259,16 @@
}
},
"node_modules/typescript-eslint": {
"version": "8.48.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.48.0.tgz",
"integrity": "sha512-fcKOvQD9GUn3Xw63EgiDqhvWJ5jsyZUaekl3KVpGsDJnN46WJTe3jWxtQP9lMZm1LJNkFLlTaWAxK2vUQR+cqw==",
"version": "8.49.0",
"resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.49.0.tgz",
"integrity": "sha512-zRSVH1WXD0uXczCXw+nsdjGPUdx4dfrs5VQoHnUWmv1U3oNlAKv4FUNdLDhVUg+gYn+a5hUESqch//Rv5wVhrg==",
"dev": true,
"license": "MIT",
"dependencies": {
"@typescript-eslint/eslint-plugin": "8.48.0",
"@typescript-eslint/parser": "8.48.0",
"@typescript-eslint/typescript-estree": "8.48.0",
"@typescript-eslint/utils": "8.48.0"
"@typescript-eslint/eslint-plugin": "8.49.0",
"@typescript-eslint/parser": "8.49.0",
"@typescript-eslint/typescript-estree": "8.49.0",
"@typescript-eslint/utils": "8.49.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -7392,6 +7397,7 @@
"resolved": "https://registry.npmjs.org/vite/-/vite-7.2.2.tgz",
"integrity": "sha512-BxAKBWmIbrDgrokdGZH1IgkIk/5mMHDreLDmCJ0qpyJaAteP8NvMhkwr/ZCQNqNH97bw/dANTE9PDzqwJghfMQ==",
"license": "MIT",
"peer": true,
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.5.0",
@@ -7467,6 +7473,7 @@
"integrity": "sha512-urzu3NCEV0Qa0Y2PwvBtRgmNtxhj5t5ULw7cuKhIHh3OrkKTLlut0lnBOv9qe5OvbkMH2g38G7KPDCTpIytBVg==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"@vitest/expect": "4.0.8",
"@vitest/mocker": "4.0.8",
@@ -7795,6 +7802,7 @@
"resolved": "https://registry.npmjs.org/zod/-/zod-4.1.12.tgz",
"integrity": "sha512-JInaHOamG8pt5+Ey8kGmdcAcg3OL9reK8ltczgHTAwNhMys/6ThXHityHxVV2p3fkw/c+MAvBHFVYHFZDmjMCQ==",
"license": "MIT",
"peer": true,
"funding": {
"url": "https://github.com/sponsors/colinhacks"
}

View File

@@ -27,7 +27,7 @@
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"date-fns": "^4.1.0",
"lucide-react": "^0.555.0",
"lucide-react": "^0.560.0",
"react": "^19.2.0",
"react-dom": "^19.2.0",
"sonner": "^2.0.7",
@@ -49,10 +49,10 @@
"eslint-plugin-react": "^7.37.5",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.20",
"jsdom": "^27.2.0",
"jsdom": "^27.3.0",
"tw-animate-css": "^1.4.0",
"typescript": "^5.9.3",
"typescript-eslint": "^8.48.0",
"typescript-eslint": "^8.49.0",
"vite": "^7.2.2",
"vitest": "^4.0.8"
}

View File

@@ -2,13 +2,14 @@
 import { Button } from "@/components/ui/button";
 import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
 import type { Instance } from "@/types/instance";
-import { Edit, FileText, Play, Square, Trash2, MoreHorizontal, Download } from "lucide-react";
+import { Edit, FileText, Play, Square, Trash2, MoreHorizontal, Download, Boxes } from "lucide-react";
 import LogsDialog from "@/components/LogDialog";
+import ModelsDialog from "@/components/ModelsDialog";
 import HealthBadge from "@/components/HealthBadge";
 import BackendBadge from "@/components/BackendBadge";
-import { useState } from "react";
+import { useState, useEffect } from "react";
 import { useInstanceHealth } from "@/hooks/useInstanceHealth";
-import { instancesApi } from "@/lib/api";
+import { instancesApi, llamaCppApi } from "@/lib/api";
interface InstanceCardProps {
instance: Instance;
@@ -26,9 +27,31 @@ function InstanceCard({
   editInstance,
 }: InstanceCardProps) {
   const [isLogsOpen, setIsLogsOpen] = useState(false);
+  const [isModelsOpen, setIsModelsOpen] = useState(false);
   const [showAllActions, setShowAllActions] = useState(false);
+  const [modelCount, setModelCount] = useState(0);
   const health = useInstanceHealth(instance.name, instance.status);
+  const running = instance.status === "running";
+  const isLlamaCpp = instance.options?.backend_type === "llama_cpp";
+
+  // Fetch model count for llama.cpp instances
+  useEffect(() => {
+    if (!isLlamaCpp || !running) {
+      setModelCount(0);
+      return;
+    }
+    void (async () => {
+      try {
+        const models = await llamaCppApi.getModels(instance.name);
+        setModelCount(models.length);
+      } catch {
+        setModelCount(0);
+      }
+    })();
+  }, [instance.name, isLlamaCpp, running]);
const handleStart = () => {
startInstance(instance.name);
};
@@ -53,6 +76,10 @@ function InstanceCard({
setIsLogsOpen(true);
};
const handleModels = () => {
setIsModelsOpen(true);
};
const handleExport = () => {
void (async () => {
try {
@@ -83,8 +110,6 @@ function InstanceCard({
     })();
   };
 
-  const running = instance.status === "running";
-
   return (
     <>
       <Card className="hover:shadow-md transition-shadow">
@@ -162,6 +187,20 @@ function InstanceCard({
Logs
</Button>
{isLlamaCpp && modelCount > 1 && (
<Button
size="sm"
variant="outline"
onClick={handleModels}
title="Manage models"
data-testid="manage-models-button"
className="flex-1"
>
<Boxes className="h-4 w-4 mr-1" />
Models ({modelCount})
</Button>
)}
<Button
size="sm"
variant="outline"
@@ -195,6 +234,13 @@ function InstanceCard({
instanceName={instance.name}
isRunning={running}
/>
<ModelsDialog
open={isModelsOpen}
onOpenChange={setIsModelsOpen}
instanceName={instance.name}
isRunning={running}
/>
</>
);
}

View File

@@ -0,0 +1,287 @@
import React, { useState, useEffect } from 'react'
import { Button } from '@/components/ui/button'
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from '@/components/ui/dialog'
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from '@/components/ui/table'
import { Badge } from '@/components/ui/badge'
import { llamaCppApi } from '@/lib/api'
import { RefreshCw, Loader2, AlertCircle } from 'lucide-react'
interface ModelsDialogProps {
open: boolean
onOpenChange: (open: boolean) => void
instanceName: string
isRunning: boolean
}
interface Model {
id: string
object: string
owned_by: string
created: number
in_cache: boolean
path: string
status: {
value: string // "loaded" | "loading" | "unloaded"
args: string[]
}
}
const StatusIcon: React.FC<{ status: string }> = ({ status }) => {
switch (status) {
case 'loaded':
return (
<div className="h-2 w-2 rounded-full bg-green-500" />
)
case 'loading':
return (
<Loader2
className="h-3 w-3 animate-spin text-yellow-500"
/>
)
case 'unloaded':
return (
<div className="h-2 w-2 rounded-full bg-gray-400" />
)
default:
return null
}
}
const ModelsDialog: React.FC<ModelsDialogProps> = ({
open,
onOpenChange,
instanceName,
isRunning,
}) => {
const [models, setModels] = useState<Model[]>([])
const [loading, setLoading] = useState(false)
const [error, setError] = useState<string | null>(null)
const [loadingModels, setLoadingModels] = useState<Set<string>>(new Set())
// Fetch models function
const fetchModels = React.useCallback(async () => {
if (!instanceName || !isRunning) return
setLoading(true)
setError(null)
try {
const response = await llamaCppApi.getModels(instanceName)
setModels(response)
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to fetch models')
} finally {
setLoading(false)
}
}, [instanceName, isRunning])
// Poll for models while dialog is open
useEffect(() => {
if (!open || !isRunning) return
// Initial fetch
void fetchModels()
// Poll every 2 seconds
const interval = setInterval(() => {
void fetchModels()
}, 2000)
return () => clearInterval(interval)
}, [open, isRunning, fetchModels])
// Load model
const loadModel = async (modelName: string) => {
setLoadingModels((prev) => new Set(prev).add(modelName))
setError(null)
try {
await llamaCppApi.loadModel(instanceName, modelName)
// Polling will pick up the change
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to load model')
} finally {
setLoadingModels((prev) => {
const newSet = new Set(prev)
newSet.delete(modelName)
return newSet
})
}
}
// Unload model
const unloadModel = async (modelName: string) => {
setLoadingModels((prev) => new Set(prev).add(modelName))
setError(null)
try {
await llamaCppApi.unloadModel(instanceName, modelName)
// Polling will pick up the change
} catch (err) {
setError(err instanceof Error ? err.message : 'Failed to unload model')
} finally {
setLoadingModels((prev) => {
const newSet = new Set(prev)
newSet.delete(modelName)
return newSet
})
}
}
return (
<Dialog open={open} onOpenChange={onOpenChange}>
<DialogContent className="sm:max-w-4xl max-w-[calc(100%-2rem)] max-h-[80vh] flex flex-col">
<DialogHeader>
<div className="flex items-center justify-between">
<div>
<DialogTitle className="flex items-center gap-2">
Models: {instanceName}
<Badge variant={isRunning ? 'default' : 'secondary'}>
{isRunning ? 'Running' : 'Stopped'}
</Badge>
</DialogTitle>
<DialogDescription>
Manage models in this llama.cpp instance
</DialogDescription>
</div>
<Button
variant="outline"
size="sm"
onClick={() => void fetchModels()}
disabled={loading || !isRunning}
>
{loading ? (
<Loader2 className="h-4 w-4 animate-spin" />
) : (
<RefreshCw className="h-4 w-4" />
)}
</Button>
</div>
</DialogHeader>
{/* Error Display */}
{error && (
<div className="flex items-center gap-2 p-3 bg-destructive/10 border border-destructive/20 rounded-lg">
<AlertCircle className="h-4 w-4 text-destructive" />
<span className="text-sm text-destructive">{error}</span>
</div>
)}
{/* Models Table */}
<div className="flex-1 flex flex-col min-h-0 overflow-auto">
{!isRunning ? (
<div className="flex items-center justify-center h-full text-muted-foreground">
Instance is not running
</div>
) : loading && models.length === 0 ? (
<div className="flex items-center justify-center h-full">
<Loader2 className="h-6 w-6 animate-spin text-muted-foreground" />
<span className="ml-2 text-muted-foreground">
Loading models...
</span>
</div>
) : models.length === 0 ? (
<div className="flex items-center justify-center h-full text-muted-foreground">
No models found
</div>
) : (
<Table>
<TableHeader>
<TableRow>
<TableHead>Model</TableHead>
<TableHead>Status</TableHead>
<TableHead className="text-right">Actions</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{models.map((model) => {
const isLoading = loadingModels.has(model.id)
const isModelLoading = model.status.value === 'loading'
return (
<TableRow key={model.id}>
<TableCell className="font-mono text-sm">
{model.id}
</TableCell>
<TableCell>
<div className="flex items-center gap-2">
<StatusIcon status={model.status.value} />
<span className="text-sm capitalize">
{model.status.value}
</span>
</div>
</TableCell>
<TableCell className="text-right">
{model.status.value === 'loaded' ? (
<Button
size="sm"
variant="outline"
onClick={() => void unloadModel(model.id)}
disabled={!isRunning || isLoading || isModelLoading}
>
{isLoading ? (
<>
<Loader2 className="h-3 w-3 animate-spin mr-1" />
Unloading...
</>
) : (
'Unload'
)}
</Button>
) : model.status.value === 'unloaded' ? (
<Button
size="sm"
variant="default"
onClick={() => void loadModel(model.id)}
disabled={!isRunning || isLoading || isModelLoading}
>
{isLoading ? (
<>
<Loader2 className="h-3 w-3 animate-spin mr-1" />
Loading...
</>
) : (
'Load'
)}
</Button>
) : (
<Button size="sm" variant="ghost" disabled>
Loading...
</Button>
)}
</TableCell>
</TableRow>
)
})}
</TableBody>
</Table>
)}
</div>
{/* Auto-refresh indicator */}
{isRunning && (
<div className="flex items-center gap-2 text-sm text-muted-foreground">
<div className="w-2 h-2 bg-green-500 rounded-full animate-pulse"></div>
Auto-refreshing every 2 seconds
</div>
)}
</DialogContent>
</Dialog>
)
}
export default ModelsDialog

View File

@@ -0,0 +1,117 @@
import * as React from "react"
import { cn } from "@/lib/utils"
const Table = React.forwardRef<
HTMLTableElement,
React.HTMLAttributes<HTMLTableElement>
>(({ className, ...props }, ref) => (
<div className="relative w-full overflow-auto">
<table
ref={ref}
className={cn("w-full caption-bottom text-sm", className)}
{...props}
/>
</div>
))
Table.displayName = "Table"
const TableHeader = React.forwardRef<
HTMLTableSectionElement,
React.HTMLAttributes<HTMLTableSectionElement>
>(({ className, ...props }, ref) => (
<thead ref={ref} className={cn("[&_tr]:border-b", className)} {...props} />
))
TableHeader.displayName = "TableHeader"
const TableBody = React.forwardRef<
HTMLTableSectionElement,
React.HTMLAttributes<HTMLTableSectionElement>
>(({ className, ...props }, ref) => (
<tbody
ref={ref}
className={cn("[&_tr:last-child]:border-0", className)}
{...props}
/>
))
TableBody.displayName = "TableBody"
const TableFooter = React.forwardRef<
HTMLTableSectionElement,
React.HTMLAttributes<HTMLTableSectionElement>
>(({ className, ...props }, ref) => (
<tfoot
ref={ref}
className={cn(
"border-t bg-muted/50 font-medium [&>tr]:last:border-b-0",
className
)}
{...props}
/>
))
TableFooter.displayName = "TableFooter"
const TableRow = React.forwardRef<
HTMLTableRowElement,
React.HTMLAttributes<HTMLTableRowElement>
>(({ className, ...props }, ref) => (
<tr
ref={ref}
className={cn(
"border-b transition-colors hover:bg-muted/50 data-[state=selected]:bg-muted",
className
)}
{...props}
/>
))
TableRow.displayName = "TableRow"
const TableHead = React.forwardRef<
HTMLTableCellElement,
React.ThHTMLAttributes<HTMLTableCellElement>
>(({ className, ...props }, ref) => (
<th
ref={ref}
className={cn(
"h-12 px-4 text-left align-middle font-medium text-muted-foreground [&:has([role=checkbox])]:pr-0",
className
)}
{...props}
/>
))
TableHead.displayName = "TableHead"
const TableCell = React.forwardRef<
HTMLTableCellElement,
React.TdHTMLAttributes<HTMLTableCellElement>
>(({ className, ...props }, ref) => (
<td
ref={ref}
className={cn("p-4 align-middle [&:has([role=checkbox])]:pr-0", className)}
{...props}
/>
))
TableCell.displayName = "TableCell"
const TableCaption = React.forwardRef<
HTMLTableCaptionElement,
React.HTMLAttributes<HTMLTableCaptionElement>
>(({ className, ...props }, ref) => (
<caption
ref={ref}
className={cn("mt-4 text-sm text-muted-foreground", className)}
{...props}
/>
))
TableCaption.displayName = "TableCaption"
export {
Table,
TableHeader,
TableBody,
TableFooter,
TableHead,
TableRow,
TableCell,
TableCaption,
}

View File

@@ -205,3 +205,51 @@ export const apiKeysApi = {
getPermissions: (id: number) =>
apiCall<KeyPermissionResponse[]>(`/auth/keys/${id}/permissions`),
};
// Llama.cpp model management types
export interface Model {
id: string;
object: string;
owned_by: string;
created: number;
in_cache: boolean;
path: string;
status: {
value: string; // "loaded" | "loading" | "unloaded"
args: string[];
};
}
export interface ModelsListResponse {
object: string;
data: Model[];
}
// Llama.cpp model management API functions
export const llamaCppApi = {
// GET /llama-cpp/{name}/models
getModels: async (instanceName: string): Promise<Model[]> => {
const response = await apiCall<ModelsListResponse>(
`/llama-cpp/${encodeURIComponent(instanceName)}/models`
);
return response.data;
},
// POST /llama-cpp/{name}/models/{model}/load
loadModel: (instanceName: string, modelName: string) =>
apiCall<{ status: string; message: string }>(
`/llama-cpp/${encodeURIComponent(instanceName)}/models/${encodeURIComponent(modelName)}/load`,
{
method: "POST",
}
),
// POST /llama-cpp/{name}/models/{model}/unload
unloadModel: (instanceName: string, modelName: string) =>
apiCall<{ status: string; message: string }>(
`/llama-cpp/${encodeURIComponent(instanceName)}/models/${encodeURIComponent(modelName)}/unload`,
{
method: "POST",
}
),
};