Merge pull request #118 from lordmathis/chore/remove-deprecated

chore: Remove deprecated code
This commit is contained in:
2025-12-22 21:53:19 +01:00
committed by GitHub
15 changed files with 439 additions and 180 deletions

View File

@@ -184,7 +184,6 @@ data_dir: ~/.local/share/llamactl # Main data directory (database, instances, l
instances:
port_range: [8000, 9000] # Port range for instances
configs_dir: ~/.local/share/llamactl/instances # Instance configs directory (platform dependent) [deprecated]
logs_dir: ~/.local/share/llamactl/logs # Logs directory (platform dependent)
auto_create_dirs: true # Auto-create data/config/logs dirs if missing
max_instances: -1 # Max instances (-1 = unlimited)

View File

@@ -57,11 +57,6 @@ func main() {
log.Printf("Error creating data directory %s: %v\nData persistence may not be available.", cfg.DataDir, err)
}
// Create instances directory
if err := os.MkdirAll(cfg.Instances.InstancesDir, 0755); err != nil {
log.Printf("Error creating instances directory %s: %v\nPersistence will not be available.", cfg.Instances.InstancesDir, err)
}
// Create logs directory
if err := os.MkdirAll(cfg.Instances.LogsDir, 0755); err != nil {
log.Printf("Error creating log directory %s: %v\nInstance logs will not be available.", cfg.Instances.LogsDir, err)
@@ -84,11 +79,6 @@ func main() {
log.Fatalf("Failed to run database migrations: %v", err)
}
// Migrate from JSON files if needed (one-time migration)
if err := migrateFromJSON(&cfg, db); err != nil {
log.Printf("Warning: Failed to migrate from JSON: %v", err)
}
// Initialize the instance manager with dependency injection
instanceManager := manager.New(&cfg, db)

View File

@@ -1,87 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"llamactl/pkg/config"
"llamactl/pkg/database"
"llamactl/pkg/instance"
"log"
"os"
"path/filepath"
)
// migrateFromJSON performs a one-time import of legacy instance definitions
// from JSON files into the SQLite database. Files that were successfully
// imported are relocated into a "migrated" subdirectory so later startups do
// not import them again. Returns nil when there is nothing to migrate.
func migrateFromJSON(cfg *config.AppConfig, db database.InstanceStore) error {
	dir := cfg.Instances.InstancesDir
	if dir == "" {
		return nil // No instances directory configured
	}

	// Nothing to do if the legacy directory was never created.
	if _, err := os.Stat(dir); os.IsNotExist(err) {
		return nil
	}

	jsonFiles, err := filepath.Glob(filepath.Join(dir, "*.json"))
	if err != nil {
		return fmt.Errorf("failed to list instance files: %w", err)
	}
	if len(jsonFiles) == 0 {
		return nil // No JSON files to migrate
	}

	log.Printf("Migrating %d instances from JSON to SQLite...", len(jsonFiles))

	// Destination for files that have been imported.
	doneDir := filepath.Join(dir, "migrated")
	if err := os.MkdirAll(doneDir, 0755); err != nil {
		return fmt.Errorf("failed to create migrated directory: %w", err)
	}

	count := 0
	for _, path := range jsonFiles {
		if err := migrateJSONFile(path, db); err != nil {
			log.Printf("Failed to migrate %s: %v", path, err)
			continue
		}
		// Park the source file so it is not re-imported on the next start.
		// A failed move is non-fatal: the instance is already in the DB.
		if err := os.Rename(path, filepath.Join(doneDir, filepath.Base(path))); err != nil {
			log.Printf("Warning: Failed to move %s to migrated directory: %v", path, err)
		}
		count++
	}

	log.Printf("Successfully migrated %d/%d instances to SQLite", count, len(jsonFiles))
	return nil
}
// migrateJSONFile reads one legacy JSON instance file, decodes it into an
// instance.Instance, and persists the result to the database. The file on
// disk is left untouched; the caller is responsible for moving it aside.
func migrateJSONFile(filename string, db database.InstanceStore) error {
	raw, err := os.ReadFile(filename)
	if err != nil {
		return fmt.Errorf("failed to read file: %w", err)
	}

	var inst instance.Instance
	if err := json.Unmarshal(raw, &inst); err != nil {
		return fmt.Errorf("failed to unmarshal instance: %w", err)
	}

	if err := db.Save(&inst); err != nil {
		return fmt.Errorf("failed to save instance to database: %w", err)
	}

	log.Printf("Migrated instance %s from JSON to SQLite", inst.Name)
	return nil
}

View File

@@ -999,6 +999,156 @@ const docTemplate = `{
}
}
},
"/api/v1/llama-cpp/{name}/models": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Returns a list of models available in the specified llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "List models in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Models list response",
"schema": {
"type": "object",
"additionalProperties": true
}
},
"400": {
"description": "Invalid instance",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/llama-cpp/{name}/models/{model}/load": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Loads the specified model in the given llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "Load a model in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "string",
"description": "Model Name",
"name": "model",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Success message",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/llama-cpp/{name}/models/{model}/unload": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Unloads the specified model in the given llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "Unload a model in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "string",
"description": "Model Name",
"name": "model",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Success message",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/nodes": {
"get": {
"security": [
@@ -1788,13 +1938,6 @@ const docTemplate = `{
"config.AuthConfig": {
"type": "object",
"properties": {
"inference_keys": {
"description": "List of keys for OpenAI compatible inference endpoints",
"type": "array",
"items": {
"type": "string"
}
},
"management_keys": {
"description": "List of keys for management endpoints",
"type": "array",
@@ -1905,10 +2048,6 @@ const docTemplate = `{
"description": "Automatically create the data directory if it doesn't exist",
"type": "boolean"
},
"configs_dir": {
"description": "Instance config directory override (relative to data_dir if not absolute)",
"type": "string"
},
"default_auto_restart": {
"description": "Default auto-restart setting for new instances",
"type": "boolean"
@@ -1929,6 +2068,21 @@ const docTemplate = `{
"description": "Enable LRU eviction for instance logs",
"type": "boolean"
},
"logRotationCompress": {
"description": "Whether to compress rotated log files",
"type": "boolean",
"default": false
},
"logRotationEnabled": {
"description": "Log rotation enabled",
"type": "boolean",
"default": true
},
"logRotationMaxSize": {
"description": "Maximum log file size in MB before rotation",
"type": "integer",
"default": 100
},
"logs_dir": {
"description": "Logs directory override (relative to data_dir if not absolute)",
"type": "string"

View File

@@ -992,6 +992,156 @@
}
}
},
"/api/v1/llama-cpp/{name}/models": {
"get": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Returns a list of models available in the specified llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "List models in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Models list response",
"schema": {
"type": "object",
"additionalProperties": true
}
},
"400": {
"description": "Invalid instance",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/llama-cpp/{name}/models/{model}/load": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Loads the specified model in the given llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "Load a model in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "string",
"description": "Model Name",
"name": "model",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Success message",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/llama-cpp/{name}/models/{model}/unload": {
"post": {
"security": [
{
"ApiKeyAuth": []
}
],
"description": "Unloads the specified model in the given llama.cpp instance",
"tags": [
"Llama.cpp"
],
"summary": "Unload a model in a llama.cpp instance",
"parameters": [
{
"type": "string",
"description": "Instance Name",
"name": "name",
"in": "path",
"required": true
},
{
"type": "string",
"description": "Model Name",
"name": "model",
"in": "path",
"required": true
}
],
"responses": {
"200": {
"description": "Success message",
"schema": {
"type": "object",
"additionalProperties": {
"type": "string"
}
}
},
"400": {
"description": "Invalid request",
"schema": {
"type": "string"
}
},
"500": {
"description": "Internal Server Error",
"schema": {
"type": "string"
}
}
}
}
},
"/api/v1/nodes": {
"get": {
"security": [
@@ -1781,13 +1931,6 @@
"config.AuthConfig": {
"type": "object",
"properties": {
"inference_keys": {
"description": "List of keys for OpenAI compatible inference endpoints",
"type": "array",
"items": {
"type": "string"
}
},
"management_keys": {
"description": "List of keys for management endpoints",
"type": "array",
@@ -1898,10 +2041,6 @@
"description": "Automatically create the data directory if it doesn't exist",
"type": "boolean"
},
"configs_dir": {
"description": "Instance config directory override (relative to data_dir if not absolute)",
"type": "string"
},
"default_auto_restart": {
"description": "Default auto-restart setting for new instances",
"type": "boolean"
@@ -1922,6 +2061,21 @@
"description": "Enable LRU eviction for instance logs",
"type": "boolean"
},
"logRotationCompress": {
"description": "Whether to compress rotated log files",
"type": "boolean",
"default": false
},
"logRotationEnabled": {
"description": "Log rotation enabled",
"type": "boolean",
"default": true
},
"logRotationMaxSize": {
"description": "Maximum log file size in MB before rotation",
"type": "integer",
"default": 100
},
"logs_dir": {
"description": "Logs directory override (relative to data_dir if not absolute)",
"type": "string"

View File

@@ -39,11 +39,6 @@ definitions:
type: object
config.AuthConfig:
properties:
inference_keys:
description: List of keys for OpenAI compatible inference endpoints
items:
type: string
type: array
management_keys:
description: List of keys for management endpoints
items:
@@ -118,10 +113,6 @@ definitions:
auto_create_dirs:
description: Automatically create the data directory if it doesn't exist
type: boolean
configs_dir:
description: Instance config directory override (relative to data_dir if not
absolute)
type: string
default_auto_restart:
description: Default auto-restart setting for new instances
type: boolean
@@ -137,6 +128,18 @@ definitions:
enable_lru_eviction:
description: Enable LRU eviction for instance logs
type: boolean
logRotationCompress:
default: false
description: Whether to compress rotated log files
type: boolean
logRotationEnabled:
default: true
description: Log rotation enabled
type: boolean
logRotationMaxSize:
default: 100
description: Maximum log file size in MB before rotation
type: integer
logs_dir:
description: Logs directory override (relative to data_dir if not absolute)
type: string
@@ -955,6 +958,102 @@ paths:
summary: Stop a running instance
tags:
- Instances
/api/v1/llama-cpp/{name}/models:
get:
description: Returns a list of models available in the specified llama.cpp instance
parameters:
- description: Instance Name
in: path
name: name
required: true
type: string
responses:
"200":
description: Models list response
schema:
additionalProperties: true
type: object
"400":
description: Invalid instance
schema:
type: string
"500":
description: Internal Server Error
schema:
type: string
security:
- ApiKeyAuth: []
summary: List models in a llama.cpp instance
tags:
- Llama.cpp
/api/v1/llama-cpp/{name}/models/{model}/load:
post:
description: Loads the specified model in the given llama.cpp instance
parameters:
- description: Instance Name
in: path
name: name
required: true
type: string
- description: Model Name
in: path
name: model
required: true
type: string
responses:
"200":
description: Success message
schema:
additionalProperties:
type: string
type: object
"400":
description: Invalid request
schema:
type: string
"500":
description: Internal Server Error
schema:
type: string
security:
- ApiKeyAuth: []
summary: Load a model in a llama.cpp instance
tags:
- Llama.cpp
/api/v1/llama-cpp/{name}/models/{model}/unload:
post:
description: Unloads the specified model in the given llama.cpp instance
parameters:
- description: Instance Name
in: path
name: name
required: true
type: string
- description: Model Name
in: path
name: model
required: true
type: string
responses:
"200":
description: Success message
schema:
additionalProperties:
type: string
type: object
"400":
description: Invalid request
schema:
type: string
"500":
description: Internal Server Error
schema:
type: string
security:
- ApiKeyAuth: []
summary: Unload a model in a llama.cpp instance
tags:
- Llama.cpp
/api/v1/nodes:
get:
description: Returns a map of all nodes configured in the server (node name

View File

@@ -32,19 +32,7 @@ func LoadConfig(configPath string) (AppConfig, error) {
// 3. Override with environment variables
loadEnvVars(&cfg)
// Log warning if deprecated inference keys are present
if len(cfg.Auth.InferenceKeys) > 0 {
log.Println("⚠️ Config-based inference keys are no longer supported and will be ignored.")
log.Println(" Please create inference keys in web UI or via management API.")
}
// Set default directories if not specified
if cfg.Instances.InstancesDir == "" {
cfg.Instances.InstancesDir = filepath.Join(cfg.DataDir, "instances")
} else {
// Log deprecation warning if using custom instances dir
log.Println("⚠️ Instances directory is deprecated and will be removed in future versions. Instances are persisted in the database.")
}
if cfg.Instances.LogsDir == "" {
cfg.Instances.LogsDir = filepath.Join(cfg.DataDir, "logs")
}
@@ -101,7 +89,6 @@ func (cfg *AppConfig) SanitizedCopy() (AppConfig, error) {
}
// Clear sensitive information
sanitized.Auth.InferenceKeys = []string{}
sanitized.Auth.ManagementKeys = []string{}
// Clear API keys from nodes

View File

@@ -41,9 +41,6 @@ func TestLoadConfig_Defaults(t *testing.T) {
t.Fatalf("Failed to get user home directory: %v", err)
}
if cfg.Instances.InstancesDir != filepath.Join(homedir, ".local", "share", "llamactl", "instances") {
t.Errorf("Expected default instances directory '%s', got %q", filepath.Join(homedir, ".local", "share", "llamactl", "instances"), cfg.Instances.InstancesDir)
}
if cfg.Instances.LogsDir != filepath.Join(homedir, ".local", "share", "llamactl", "logs") {
t.Errorf("Expected default logs directory '%s', got %q", filepath.Join(homedir, ".local", "share", "llamactl", "logs"), cfg.Instances.LogsDir)
}

View File

@@ -54,9 +54,6 @@ func getDefaultConfig(dataDir string) AppConfig {
},
Instances: InstancesConfig{
PortRange: [2]int{8000, 9000},
// NOTE: empty string is set as placeholder value since InstancesDir
// should be relative path to DataDir if not explicitly set.
InstancesDir: "",
AutoCreateDirs: true,
MaxInstances: -1, // -1 means unlimited
MaxRunningInstances: -1, // -1 means unlimited
@@ -80,7 +77,6 @@ func getDefaultConfig(dataDir string) AppConfig {
},
Auth: AuthConfig{
RequireInferenceAuth: true,
InferenceKeys: []string{},
RequireManagementAuth: true,
ManagementKeys: []string{},
},

View File

@@ -31,9 +31,6 @@ func loadEnvVars(cfg *AppConfig) {
if dataDir := os.Getenv("LLAMACTL_DATA_DIRECTORY"); dataDir != "" {
cfg.DataDir = dataDir
}
if instancesDir := os.Getenv("LLAMACTL_INSTANCES_DIR"); instancesDir != "" {
cfg.Instances.InstancesDir = instancesDir
}
if logsDir := os.Getenv("LLAMACTL_LOGS_DIR"); logsDir != "" {
cfg.Instances.LogsDir = logsDir
}
@@ -220,9 +217,6 @@ func loadEnvVars(cfg *AppConfig) {
cfg.Auth.RequireInferenceAuth = b
}
}
if inferenceKeys := os.Getenv("LLAMACTL_INFERENCE_KEYS"); inferenceKeys != "" {
cfg.Auth.InferenceKeys = strings.Split(inferenceKeys, ",")
}
if requireManagementAuth := os.Getenv("LLAMACTL_REQUIRE_MANAGEMENT_AUTH"); requireManagementAuth != "" {
if b, err := strconv.ParseBool(requireManagementAuth); err == nil {
cfg.Auth.RequireManagementAuth = b

View File

@@ -81,9 +81,6 @@ type InstancesConfig struct {
// Port range for instances (e.g., 8000,9000)
PortRange [2]int `yaml:"port_range" json:"port_range"`
// Instance config directory override (relative to data_dir if not absolute)
InstancesDir string `yaml:"configs_dir" json:"configs_dir"`
// Automatically create the data directory if it doesn't exist
AutoCreateDirs bool `yaml:"auto_create_dirs" json:"auto_create_dirs"`
@@ -133,9 +130,6 @@ type AuthConfig struct {
// Require authentication for OpenAI compatible inference endpoints
RequireInferenceAuth bool `yaml:"require_inference_auth" json:"require_inference_auth"`
// List of keys for OpenAI compatible inference endpoints
InferenceKeys []string `yaml:"inference_keys" json:"inference_keys"`
// Require authentication for management endpoints
RequireManagementAuth bool `yaml:"require_management_auth" json:"require_management_auth"`

View File

@@ -202,7 +202,6 @@ func createTestAppConfig(instancesDir string) *config.AppConfig {
},
Instances: config.InstancesConfig{
PortRange: [2]int{8000, 9000},
InstancesDir: instancesDir,
MaxInstances: 10,
MaxRunningInstances: 10,
DefaultAutoRestart: true,

View File

@@ -38,7 +38,6 @@ func TestCreateInstance_FailsWithDuplicateName(t *testing.T) {
}
func TestCreateInstance_FailsWhenMaxInstancesReached(t *testing.T) {
tempDir := t.TempDir()
appConfig := &config.AppConfig{
Backends: config.BackendConfig{
LlamaCpp: config.BackendSettings{
@@ -47,7 +46,6 @@ func TestCreateInstance_FailsWhenMaxInstancesReached(t *testing.T) {
},
Instances: config.InstancesConfig{
PortRange: [2]int{8000, 9000},
InstancesDir: tempDir,
MaxInstances: 1, // Very low limit for testing
TimeoutCheckInterval: 5,
},

View File

@@ -275,16 +275,3 @@ func TestAutoGenerationScenarios(t *testing.T) {
})
}
}
func TestConfigBasedInferenceKeysDeprecationWarning(t *testing.T) {
// Test that config-based inference keys trigger a warning (captured in logs)
cfg := config.AuthConfig{
InferenceKeys: []string{"sk-inference-old"},
}
// Creating middleware should log a warning, but shouldn't fail
_ = server.NewAPIAuthMiddleware(cfg, nil)
// If we get here without panic, the test passes
// The warning is logged but not returned as an error
}

View File

@@ -30,7 +30,6 @@ export interface ServerConfig {
export interface InstancesConfig {
port_range: [number, number]
configs_dir: string
logs_dir: string
auto_create_dirs: boolean
max_instances: number
@@ -53,7 +52,6 @@ export interface DatabaseConfig {
export interface AuthConfig {
require_inference_auth: boolean
inference_keys: string[] // Will be empty in sanitized response
require_management_auth: boolean
management_keys: string[] // Will be empty in sanitized response
}