// ParseCommandDialog — modal dialog that lets the user paste a raw backend
// launch command (llama-server / mlx_lm.server / vllm serve), sends it to the
// matching backendsApi.{llamaCpp,mlx,vllm}.parseCommand endpoint for the
// selected backend type, and hands the parsed CreateInstanceOptions back to
// the parent through the onParsed callback before closing the dialog.
//
// State: command (the pasted text), backendType (defaults to
// BackendType.LLAMA_CPP), loading (true while the parse request is in
// flight), error (validation/parse failure message shown in the dialog).
//
// handleParse: rejects an empty/whitespace command with an inline error;
// otherwise dispatches on backendType to the matching parseCommand API call
// (unknown types throw), then on success calls onParsed(options), closes via
// onOpenChange(false), clears command/error, and shows a success toast. On
// failure it stores the error message (Error.message when available,
// otherwise a generic fallback) and shows an error toast; loading is cleared
// in finally either way.
//
// handleOpenChange: resets command, backendType, and error to their initial
// values whenever the dialog is being closed, then forwards to the parent's
// onOpenChange.
//
// backendPlaceholders / getPlaceholderForBackend: example command strings per
// backend, with a generic fallback for unmapped backend types.
//
// NOTE(review): this chunk appears to be a garbled extraction — generic type
// arguments seem to have been stripped (e.g. React.FC should presumably be
// React.FC<ParseCommandDialogProps>, useState(null) for error presumably
// useState<string | null>(null), and backendPlaceholders' bare Record
// presumably Record<BackendTypeValue, string>), all JSX tags inside the
// return statement are missing (only their text content remains), and the
// component is truncated mid-return at the end of this chunk. Do not treat
// the code below as compilable as-is; recover the original from version
// control rather than reconstructing the JSX by hand.
import React, { useState } from "react"; import { Button } from "@/components/ui/button"; import { Label } from "@/components/ui/label"; import { Dialog, DialogContent, DialogDescription, DialogFooter, DialogHeader, DialogTitle, } from "@/components/ui/dialog"; import { BackendType, type BackendTypeValue, type CreateInstanceOptions } from "@/types/instance"; import { backendsApi } from "@/lib/api"; import { toast } from "sonner"; interface ParseCommandDialogProps { open: boolean; onOpenChange: (open: boolean) => void; onParsed: (options: CreateInstanceOptions) => void; } const ParseCommandDialog: React.FC = ({ open, onOpenChange, onParsed, }) => { const [command, setCommand] = useState(''); const [backendType, setBackendType] = useState(BackendType.LLAMA_CPP); const [loading, setLoading] = useState(false); const [error, setError] = useState(null); const handleParse = async () => { if (!command.trim()) { setError("Command cannot be empty"); return; } setLoading(true); setError(null); try { let options: CreateInstanceOptions; // Parse based on selected backend type switch (backendType) { case BackendType.LLAMA_CPP: options = await backendsApi.llamaCpp.parseCommand(command); break; case BackendType.MLX_LM: options = await backendsApi.mlx.parseCommand(command); break; case BackendType.VLLM: options = await backendsApi.vllm.parseCommand(command); break; default: throw new Error(`Unsupported backend type: ${backendType}`); } onParsed(options); onOpenChange(false); setCommand(''); setError(null); toast.success('Command parsed successfully'); } catch (err) { const errorMessage = err instanceof Error ? 
err.message : 'Failed to parse command'; setError(errorMessage); toast.error('Failed to parse command', { description: errorMessage }); } finally { setLoading(false); } }; const handleOpenChange = (open: boolean) => { if (!open) { setCommand(''); setBackendType(BackendType.LLAMA_CPP); setError(null); } onOpenChange(open); }; const backendPlaceholders: Record = { [BackendType.LLAMA_CPP]: "llama-server --model /path/to/model.gguf --gpu-layers 32 --ctx-size 4096", [BackendType.MLX_LM]: "mlx_lm.server --model mlx-community/Mistral-7B-Instruct-v0.3-4bit --host 0.0.0.0 --port 8080", [BackendType.VLLM]: "vllm serve --model microsoft/DialoGPT-medium --tensor-parallel-size 2 --gpu-memory-utilization 0.9", }; const getPlaceholderForBackend = (backendType: BackendTypeValue): string => { return backendPlaceholders[backendType] || "Enter your command here..."; }; return ( Parse Backend Command Select your backend type and paste the command to automatically populate the form fields