Implement more manager tests

This commit is contained in:
2025-10-21 22:07:10 +02:00
parent 4d05fcea46
commit 6afe120a0e
2 changed files with 325 additions and 8 deletions

View File

@@ -27,7 +27,7 @@ func TestNewInstanceManager(t *testing.T) {
} }
} }
func TestPersistence_SaveAndLoad(t *testing.T) { func TestManager_PersistsAndLoadsInstances(t *testing.T) {
tempDir := t.TempDir() tempDir := t.TempDir()
cfg := createPersistenceConfig(tempDir) cfg := createPersistenceConfig(tempDir)
backendConfig := createBackendConfig() backendConfig := createBackendConfig()
@@ -68,7 +68,7 @@ func TestPersistence_SaveAndLoad(t *testing.T) {
} }
} }
func TestPersistence_DeleteRemovesFile(t *testing.T) { func TestDeleteInstance_RemovesPersistenceFile(t *testing.T) {
tempDir := t.TempDir() tempDir := t.TempDir()
cfg := createPersistenceConfig(tempDir) cfg := createPersistenceConfig(tempDir)
backendConfig := createBackendConfig() backendConfig := createBackendConfig()
@@ -175,12 +175,15 @@ func TestShutdown(t *testing.T) {
// Helper functions for test configuration // Helper functions for test configuration
func createBackendConfig() config.BackendConfig { func createBackendConfig() config.BackendConfig {
// Use 'sleep' as a test command instead of 'llama-server'
// This allows tests to run in CI environments without requiring actual LLM binaries
// The sleep command will be invoked with model paths and other args, which it ignores
return config.BackendConfig{ return config.BackendConfig{
LlamaCpp: config.BackendSettings{ LlamaCpp: config.BackendSettings{
Command: "llama-server", Command: "sleep",
}, },
MLX: config.BackendSettings{ MLX: config.BackendSettings{
Command: "mlx_lm.server", Command: "sleep",
}, },
} }
} }
@@ -199,6 +202,7 @@ func createTestManager() manager.InstanceManager {
PortRange: [2]int{8000, 9000}, PortRange: [2]int{8000, 9000},
LogsDir: "/tmp/test", LogsDir: "/tmp/test",
MaxInstances: 10, MaxInstances: 10,
MaxRunningInstances: 10,
DefaultAutoRestart: true, DefaultAutoRestart: true,
DefaultMaxRestarts: 3, DefaultMaxRestarts: 3,
DefaultRestartDelay: 5, DefaultRestartDelay: 5,
@@ -207,7 +211,7 @@ func createTestManager() manager.InstanceManager {
return manager.New(createBackendConfig(), cfg, map[string]config.NodeConfig{}, "main") return manager.New(createBackendConfig(), cfg, map[string]config.NodeConfig{}, "main")
} }
func TestAutoRestartDisabledInstanceStatus(t *testing.T) { func TestManager_DoesNotAutoRestartWhenDisabled(t *testing.T) {
tempDir := t.TempDir() tempDir := t.TempDir()
cfg := createPersistenceConfig(tempDir) cfg := createPersistenceConfig(tempDir)
backendConfig := createBackendConfig() backendConfig := createBackendConfig()

View File

@@ -101,7 +101,7 @@ func TestCreateInstance_MaxInstancesLimit(t *testing.T) {
} }
} }
func TestPort_AutoAssignment(t *testing.T) { func TestCreateInstance_AutoAssignsPort(t *testing.T) {
manager := createTestManager() manager := createTestManager()
options := &instance.Options{ options := &instance.Options{
@@ -124,7 +124,7 @@ func TestPort_AutoAssignment(t *testing.T) {
} }
} }
func TestPort_ConflictDetection(t *testing.T) { func TestCreateInstance_PortConflict(t *testing.T) {
manager := createTestManager() manager := createTestManager()
options1 := &instance.Options{ options1 := &instance.Options{
@@ -162,7 +162,7 @@ func TestPort_ConflictDetection(t *testing.T) {
} }
} }
func TestPort_ReleaseOnDeletion(t *testing.T) { func TestDeleteInstance_ReleasesPort(t *testing.T) {
manager := createTestManager() manager := createTestManager()
options := &instance.Options{ options := &instance.Options{
@@ -336,3 +336,316 @@ func TestInstanceOperations_NonExistentInstance(t *testing.T) {
t.Errorf("Expected 'not found' error, got: %v", err) t.Errorf("Expected 'not found' error, got: %v", err)
} }
} }
func TestStartInstance(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	// A freshly created instance must begin in the stopped state.
	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	created, err := im.CreateInstance("test-instance", opts)
	if err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if created.IsRunning() {
		t.Error("New instance should not be running")
	}

	// Starting it should transition the instance to running.
	running, err := im.StartInstance("test-instance")
	if err != nil {
		t.Fatalf("StartInstance failed: %v", err)
	}
	if !running.IsRunning() {
		t.Error("Instance should be running after start")
	}
}
func TestStartInstance_Idempotent(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	created, err := im.CreateInstance("test-instance", opts)
	if err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}

	// First start brings the instance up.
	if _, err = im.StartInstance("test-instance"); err != nil {
		t.Fatalf("First StartInstance failed: %v", err)
	}

	// A second start of an already-running instance must succeed
	// without error and leave it running (idempotent behavior).
	again, err := im.StartInstance("test-instance")
	if err != nil {
		t.Fatalf("Second StartInstance failed: %v", err)
	}
	if !again.IsRunning() {
		t.Error("Instance should still be running")
	}
	if created.GetStatus() != instance.Running {
		t.Errorf("Expected Running status, got %v", created.GetStatus())
	}
}
func TestStopInstance(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	if _, err := im.CreateInstance("test-instance", opts); err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if _, err := im.StartInstance("test-instance"); err != nil {
		t.Fatalf("StartInstance failed: %v", err)
	}

	// Stopping a running instance should leave it in the stopped state.
	halted, err := im.StopInstance("test-instance")
	if err != nil {
		t.Fatalf("StopInstance failed: %v", err)
	}
	if halted.IsRunning() {
		t.Error("Instance should not be running after stop")
	}
}
func TestStopInstance_Idempotent(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	created, err := im.CreateInstance("test-instance", opts)
	if err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}

	// Stopping an instance that was never started must not error
	// and must leave it stopped (idempotent behavior).
	halted, err := im.StopInstance("test-instance")
	if err != nil {
		t.Fatalf("StopInstance failed: %v", err)
	}
	if halted.IsRunning() {
		t.Error("Instance should not be running")
	}
	if created.GetStatus() != instance.Stopped {
		t.Errorf("Expected Stopped status, got %v", created.GetStatus())
	}
}
func TestRestartInstance(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	if _, err := im.CreateInstance("test-instance", opts); err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if _, err := im.StartInstance("test-instance"); err != nil {
		t.Fatalf("StartInstance failed: %v", err)
	}

	// A restart should complete and leave the instance running.
	cycled, err := im.RestartInstance("test-instance")
	if err != nil {
		t.Fatalf("RestartInstance failed: %v", err)
	}
	if !cycled.IsRunning() {
		t.Error("Instance should be running after restart")
	}
}
func TestDeleteInstance_RunningInstanceFails(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
			},
		},
	}
	if _, err := im.CreateInstance("test-instance", opts); err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if _, err := im.StartInstance("test-instance"); err != nil {
		t.Fatalf("StartInstance failed: %v", err)
	}

	// Deleting an instance while it is running must be rejected.
	if err := im.DeleteInstance("test-instance"); err == nil {
		t.Error("Expected error when deleting running instance")
	}
}
func TestUpdateInstance_OnRunningInstance(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
				Port:  8080,
			},
		},
	}
	if _, err := im.CreateInstance("test-instance", opts); err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if _, err := im.StartInstance("test-instance"); err != nil {
		t.Fatalf("StartInstance failed: %v", err)
	}

	// Swap in a different model path while the instance is running;
	// the port stays the same so only the model should change.
	replacement := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/new-model.gguf",
				Port:  8080,
			},
		},
	}
	updated, err := im.UpdateInstance("test-instance", replacement)
	if err != nil {
		t.Fatalf("UpdateInstance failed: %v", err)
	}

	// The update must not leave the instance stopped, and the new
	// options must be visible afterwards.
	if !updated.IsRunning() {
		t.Error("Instance should be running after update")
	}
	if updated.GetOptions().BackendOptions.LlamaServerOptions.Model != "/path/to/new-model.gguf" {
		t.Errorf("Expected model to be updated")
	}
}
func TestUpdateInstance_PortChange(t *testing.T) {
	im := createTestManager()
	defer im.Shutdown()

	opts := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
				Port:  8080,
			},
		},
	}
	created, err := im.CreateInstance("test-instance", opts)
	if err != nil {
		t.Fatalf("CreateInstance failed: %v", err)
	}
	if created.GetPort() != 8080 {
		t.Errorf("Expected port 8080, got %d", created.GetPort())
	}

	// Move the instance to a different port via an update.
	moved := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model.gguf",
				Port:  8081,
			},
		},
	}
	updated, err := im.UpdateInstance("test-instance", moved)
	if err != nil {
		t.Fatalf("UpdateInstance failed: %v", err)
	}
	if updated.GetPort() != 8081 {
		t.Errorf("Expected port 8081, got %d", updated.GetPort())
	}

	// The previous port must have been released back to the pool:
	// a second instance should now be able to claim 8080.
	reclaim := &instance.Options{
		BackendOptions: backends.Options{
			BackendType: backends.BackendTypeLlamaCpp,
			LlamaServerOptions: &backends.LlamaServerOptions{
				Model: "/path/to/model2.gguf",
				Port:  8080,
			},
		},
	}
	if _, err = im.CreateInstance("test-instance-2", reclaim); err != nil {
		t.Errorf("Should be able to use old port 8080: %v", err)
	}
}