diff --git a/pkg/instance/instance.go b/pkg/instance/instance.go index a6c9657..3781fcf 100644 --- a/pkg/instance/instance.go +++ b/pkg/instance/instance.go @@ -147,6 +147,11 @@ func (i *Process) SetOptions(options *CreateInstanceOptions) { return } + // Preserve the original nodes to prevent changing instance location + if i.options != nil && i.options.Nodes != nil { + options.Nodes = i.options.Nodes + } + // Validate and copy options options.ValidateAndApplyDefaults(i.Name, i.globalInstanceSettings) diff --git a/pkg/instance/instance_test.go b/pkg/instance/instance_test.go index 14ff78d..0402ac4 100644 --- a/pkg/instance/instance_test.go +++ b/pkg/instance/instance_test.go @@ -191,6 +191,58 @@ func TestSetOptions(t *testing.T) { } } +func TestSetOptions_PreservesNodes(t *testing.T) { + backendConfig := &config.BackendConfig{ + LlamaCpp: config.BackendSettings{ + Command: "llama-server", + Args: []string{}, + }, + } + + globalSettings := &config.InstancesConfig{ + LogsDir: "/tmp/test", + DefaultAutoRestart: true, + DefaultMaxRestarts: 3, + DefaultRestartDelay: 5, + } + + // Create instance with initial nodes + initialOptions := &instance.CreateInstanceOptions{ + BackendType: backends.BackendTypeLlamaCpp, + Nodes: []string{"worker1"}, + LlamaServerOptions: &llamacpp.LlamaServerOptions{ + Model: "/path/to/model.gguf", + Port: 8080, + }, + } + + mockOnStatusChange := func(oldStatus, newStatus instance.InstanceStatus) {} + inst := instance.NewInstance("test-instance", backendConfig, globalSettings, initialOptions, "main", mockOnStatusChange) + + // Try to update with different nodes + updatedOptions := &instance.CreateInstanceOptions{ + BackendType: backends.BackendTypeLlamaCpp, + Nodes: []string{"worker2"}, // Attempt to change node + LlamaServerOptions: &llamacpp.LlamaServerOptions{ + Model: "/path/to/new-model.gguf", + Port: 8081, + }, + } + + inst.SetOptions(updatedOptions) + opts := inst.GetOptions() + + // Nodes should remain unchanged + if len(opts.Nodes) != 1 || opts.Nodes[0] != "worker1" { + t.Errorf("Expected nodes to remain ['worker1'], got %v", opts.Nodes) + } + + // Other options should be updated + if opts.LlamaServerOptions.Model != "/path/to/new-model.gguf" { + t.Errorf("Expected updated model '/path/to/new-model.gguf', got %q", opts.LlamaServerOptions.Model) + } +} + func TestGetProxy(t *testing.T) { backendConfig := &config.BackendConfig{ LlamaCpp: config.BackendSettings{ diff --git a/webui/src/components/instance/InstanceSettingsCard.tsx b/webui/src/components/instance/InstanceSettingsCard.tsx index a89ee90..1834eab 100644 --- a/webui/src/components/instance/InstanceSettingsCard.tsx +++ b/webui/src/components/instance/InstanceSettingsCard.tsx @@ -100,7 +100,8 @@ const InstanceSettingsCard: React.FC = ({ value={selectedNode} onChange={handleNodeChange} options={nodeOptions} - description="Select the node where the instance will run (default: main node)" + description={isEditing ? "Node cannot be changed after instance creation" : "Select the node where the instance will run"} + disabled={isEditing} /> )}