From 30e40ecd3086aafb458ed743561f54bf30d94de1 Mon Sep 17 00:00:00 2001 From: LordMathis Date: Tue, 23 Sep 2025 21:27:58 +0200 Subject: [PATCH 1/4] Refactor API endpoints to use /backends/llama-cpp path and update related documentation --- apidocs/docs.go | 168 ++++++++++++++++++++--------------------- apidocs/swagger.json | 168 ++++++++++++++++++++--------------------- apidocs/swagger.yaml | 102 ++++++++++++------------- pkg/server/handlers.go | 12 +-- pkg/server/routes.go | 9 +-- 5 files changed, 228 insertions(+), 231 deletions(-) diff --git a/apidocs/docs.go b/apidocs/docs.go index d5108e1..4b521b1 100644 --- a/apidocs/docs.go +++ b/apidocs/docs.go @@ -19,6 +19,62 @@ const docTemplate = `{ "host": "{{.Host}}", "basePath": "{{.BasePath}}", "paths": { + "/backends/llama-cpp/devices": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns a list of available devices for the llama server", + "tags": [ + "backends" + ], + "summary": "List available devices for llama server", + "responses": { + "200": { + "description": "List of devices", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/backends/llama-cpp/help": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns the help text for the llama server command", + "tags": [ + "backends" + ], + "summary": "Get help for llama server", + "responses": { + "200": { + "description": "Help text", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, "/backends/llama-cpp/parse-command": { "post": { "security": [ @@ -76,6 +132,34 @@ const docTemplate = `{ } } }, + "/backends/llama-cpp/version": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns the version of the llama server command", + "tags": [ + "backends" + ], + "summary": "Get version of llama server", + "responses": { + "200": { + "description": "Version information", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, "/backends/mlx/parse-command": { "post": { "security": [ @@ -658,90 +742,6 @@ const docTemplate = `{ } } }, - "/server/devices": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns a list of available devices for the llama server", - "tags": [ - "server" - ], - "summary": "List available devices for llama server", - "responses": { - "200": { - "description": "List of devices", - "schema": { - "type": "string" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, - "/server/help": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns the help text for the llama server command", - "tags": [ - "server" - ], - "summary": "Get help for llama server", - "responses": { - "200": { - "description": "Help text", - "schema": { - "type": "string" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, - "/server/version": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns the version of the llama server command", - "tags": [ - "server" - ], - "summary": "Get version of llama server", - "responses": { - "200": { - "description": "Version information", - "schema": { - "type": "string" 
- } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, "/v1/": { "post": { "security": [ diff --git a/apidocs/swagger.json b/apidocs/swagger.json index 9648cc0..71471e6 100644 --- a/apidocs/swagger.json +++ b/apidocs/swagger.json @@ -12,6 +12,62 @@ }, "basePath": "/api/v1", "paths": { + "/backends/llama-cpp/devices": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns a list of available devices for the llama server", + "tags": [ + "backends" + ], + "summary": "List available devices for llama server", + "responses": { + "200": { + "description": "List of devices", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/backends/llama-cpp/help": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns the help text for the llama server command", + "tags": [ + "backends" + ], + "summary": "Get help for llama server", + "responses": { + "200": { + "description": "Help text", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, "/backends/llama-cpp/parse-command": { "post": { "security": [ @@ -69,6 +125,34 @@ } } }, + "/backends/llama-cpp/version": { + "get": { + "security": [ + { + "ApiKeyAuth": [] + } + ], + "description": "Returns the version of the llama server command", + "tags": [ + "backends" + ], + "summary": "Get version of llama server", + "responses": { + "200": { + "description": "Version information", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, "/backends/mlx/parse-command": { "post": { "security": [ @@ -651,90 +735,6 @@ } } }, - "/server/devices": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns a list of available devices for the llama server", - "tags": [ - "server" - ], - "summary": "List available devices for llama server", - "responses": { - "200": { - "description": "List of devices", - "schema": { - "type": "string" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, - "/server/help": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns the help text for the llama server command", - "tags": [ - "server" - ], - "summary": "Get help for llama server", - "responses": { - "200": { - "description": "Help text", - "schema": { - "type": "string" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, - "/server/version": { - "get": { - "security": [ - { - "ApiKeyAuth": [] - } - ], - "description": "Returns the version of the llama server command", - "tags": [ - "server" - ], - "summary": "Get version of llama server", - "responses": { - "200": { - "description": "Version information", - "schema": { - "type": "string" - } - }, - "500": { - "description": "Internal Server Error", - "schema": { - "type": "string" - } - } - } - } - }, "/v1/": { "post": { "security": [ diff --git a/apidocs/swagger.yaml b/apidocs/swagger.yaml index 4b68861..a5db184 100644 --- a/apidocs/swagger.yaml +++ b/apidocs/swagger.yaml @@ -88,6 +88,40 @@ info: title: llamactl API version: "1.0" paths: + /backends/llama-cpp/devices: + get: + description: Returns a list of available devices for the llama 
server + responses: + "200": + description: List of devices + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + security: + - ApiKeyAuth: [] + summary: List available devices for llama server + tags: + - backends + /backends/llama-cpp/help: + get: + description: Returns the help text for the llama server command + responses: + "200": + description: Help text + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + security: + - ApiKeyAuth: [] + summary: Get help for llama server + tags: + - backends /backends/llama-cpp/parse-command: post: consumes: @@ -124,6 +158,23 @@ paths: summary: Parse llama-server command tags: - backends + /backends/llama-cpp/version: + get: + description: Returns the version of the llama server command + responses: + "200": + description: Version information + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + security: + - ApiKeyAuth: [] + summary: Get version of llama server + tags: + - backends /backends/mlx/parse-command: post: consumes: @@ -494,57 +545,6 @@ paths: summary: Stop a running instance tags: - instances - /server/devices: - get: - description: Returns a list of available devices for the llama server - responses: - "200": - description: List of devices - schema: - type: string - "500": - description: Internal Server Error - schema: - type: string - security: - - ApiKeyAuth: [] - summary: List available devices for llama server - tags: - - server - /server/help: - get: - description: Returns the help text for the llama server command - responses: - "200": - description: Help text - schema: - type: string - "500": - description: Internal Server Error - schema: - type: string - security: - - ApiKeyAuth: [] - summary: Get help for llama server - tags: - - server - /server/version: - get: - description: Returns the version of the llama server command - responses: - "200": - description: Version information - schema: - type: string - "500": - description: Internal Server Error - schema: - type: string - security: - - ApiKeyAuth: [] - summary: Get version of llama server - tags: - - server /v1/: post: consumes: diff --git a/pkg/server/handlers.go b/pkg/server/handlers.go index d1c4d08..594c273 100644 --- a/pkg/server/handlers.go +++ b/pkg/server/handlers.go @@ -51,12 +51,12 @@ func (h *Handler) VersionHandler() http.HandlerFunc { // LlamaServerHelpHandler godoc // @Summary Get help for llama server // @Description Returns the help text for the llama server command -// @Tags server +// @Tags backends // @Security ApiKeyAuth // @Produces text/plain // @Success 200 {string} string "Help text" // @Failure 500 {string} string "Internal Server Error" -// @Router /server/help [get] +// @Router /backends/llama-cpp/help [get] func (h *Handler) LlamaServerHelpHandler() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { helpCmd := exec.Command("llama-server", "--help") @@ -73,12 +73,12 @@ func (h *Handler) LlamaServerHelpHandler() http.HandlerFunc { // LlamaServerVersionHandler godoc // @Summary Get version of llama server // @Description Returns the version of the llama server command -// @Tags server +// @Tags backends // @Security ApiKeyAuth // @Produces text/plain // @Success 200 {string} string "Version information" // @Failure 500 {string} string "Internal Server Error" -// @Router /server/version [get] +// @Router /backends/llama-cpp/version [get] func (h *Handler) LlamaServerVersionHandler() http.HandlerFunc 
{ return func(w http.ResponseWriter, r *http.Request) { versionCmd := exec.Command("llama-server", "--version") @@ -95,12 +95,12 @@ func (h *Handler) LlamaServerVersionHandler() http.HandlerFunc { // LlamaServerListDevicesHandler godoc // @Summary List available devices for llama server // @Description Returns a list of available devices for the llama server -// @Tags server +// @Tags backends // @Security ApiKeyAuth // @Produces text/plain // @Success 200 {string} string "List of devices" // @Failure 500 {string} string "Internal Server Error" -// @Router /server/devices [get] +// @Router /backends/llama-cpp/devices [get] func (h *Handler) LlamaServerListDevicesHandler() http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { listCmd := exec.Command("llama-server", "--list-devices") diff --git a/pkg/server/routes.go b/pkg/server/routes.go index 898b574..6af6a5c 100644 --- a/pkg/server/routes.go +++ b/pkg/server/routes.go @@ -44,15 +44,12 @@ func SetupRouter(handler *Handler) *chi.Mux { r.Get("/version", handler.VersionHandler()) // Get server version - r.Route("/server", func(r chi.Router) { - r.Get("/help", handler.LlamaServerHelpHandler()) - r.Get("/version", handler.LlamaServerVersionHandler()) - r.Get("/devices", handler.LlamaServerListDevicesHandler()) - }) - // Backend-specific endpoints r.Route("/backends", func(r chi.Router) { r.Route("/llama-cpp", func(r chi.Router) { + r.Get("/help", handler.LlamaServerHelpHandler()) + r.Get("/version", handler.LlamaServerVersionHandler()) + r.Get("/devices", handler.LlamaServerListDevicesHandler()) r.Post("/parse-command", handler.ParseLlamaCommand()) }) r.Route("/mlx", func(r chi.Router) { From 71a48aa3b672de13f4161b04deb8053cb5669be0 Mon Sep 17 00:00:00 2001 From: LordMathis Date: Tue, 23 Sep 2025 21:28:23 +0200 Subject: [PATCH 2/4] Update server API functions to use /backends/llama-cpp path --- webui/src/lib/api.ts | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/webui/src/lib/api.ts b/webui/src/lib/api.ts index a170246..be9ab80 100644 --- a/webui/src/lib/api.ts +++ b/webui/src/lib/api.ts @@ -63,16 +63,16 @@ async function apiCall( } } -// Server API functions +// Server API functions (moved to llama-cpp backend) export const serverApi = { - // GET /server/help - getHelp: () => apiCall("/server/help", {}, "text"), + // GET /backends/llama-cpp/help + getHelp: () => apiCall("/backends/llama-cpp/help", {}, "text"), - // GET /server/version - getVersion: () => apiCall("/server/version", {}, "text"), + // GET /backends/llama-cpp/version + getVersion: () => apiCall("/backends/llama-cpp/version", {}, "text"), - // GET /server/devices - getDevices: () => apiCall("/server/devices", {}, "text"), + // GET /backends/llama-cpp/devices + getDevices: () => apiCall("/backends/llama-cpp/devices", {}, "text"), }; // Backend API functions From edf05759259f28132334c181361dd727584e3b70 Mon Sep 17 00:00:00 2001 From: LordMathis Date: Tue, 23 Sep 2025 21:44:04 +0200 Subject: [PATCH 3/4] Replace SystemInfoDialog with BackendInfoDialog and update related references --- webui/src/App.tsx | 16 +- webui/src/components/BackendInfoDialog.tsx | 274 +++++++++++++++++++++ webui/src/components/SystemInfoDialog.tsx | 203 --------------- webui/src/lib/api.ts | 2 +- 4 files changed, 283 insertions(+), 212 deletions(-) create mode 100644 webui/src/components/BackendInfoDialog.tsx delete mode 100644 webui/src/components/SystemInfoDialog.tsx diff --git a/webui/src/App.tsx b/webui/src/App.tsx index 04c8c01..b58e54f 100644 --- 
a/webui/src/App.tsx +++ b/webui/src/App.tsx @@ -3,7 +3,7 @@ import Header from "@/components/Header"; import InstanceList from "@/components/InstanceList"; import InstanceDialog from "@/components/InstanceDialog"; import LoginDialog from "@/components/LoginDialog"; -import SystemInfoDialog from "./components/SystemInfoDialog"; +import BackendInfoDialog from "./components/BackendInfoDialog"; import { type CreateInstanceOptions, type Instance } from "@/types/instance"; import { useInstances } from "@/contexts/InstancesContext"; import { useAuth } from "@/contexts/AuthContext"; @@ -13,7 +13,7 @@ import { Toaster } from "sonner"; function App() { const { isAuthenticated, isLoading: authLoading } = useAuth(); const [isInstanceModalOpen, setIsInstanceModalOpen] = useState(false); - const [isSystemInfoModalOpen, setIsSystemInfoModalOpen] = useState(false); + const [isBackendInfoModalOpen, setIsBackendInfoModalOpen] = useState(false); const [editingInstance, setEditingInstance] = useState( undefined ); @@ -37,8 +37,8 @@ function App() { } }; - const handleShowSystemInfo = () => { - setIsSystemInfoModalOpen(true); + const handleShowBackendInfo = () => { + setIsBackendInfoModalOpen(true); }; // Show loading spinner while checking auth @@ -70,7 +70,7 @@ function App() { return (
-
+
@@ -82,9 +82,9 @@ function App() { instance={editingInstance} /> - diff --git a/webui/src/components/BackendInfoDialog.tsx b/webui/src/components/BackendInfoDialog.tsx new file mode 100644 index 0000000..9ce0881 --- /dev/null +++ b/webui/src/components/BackendInfoDialog.tsx @@ -0,0 +1,274 @@ +import React, { useState, useEffect } from 'react' +import { Button } from '@/components/ui/button' +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from '@/components/ui/dialog' +import SelectInput from '@/components/form/SelectInput' +import { + RefreshCw, + AlertCircle, + Loader2, + ChevronDown, + ChevronRight, + Monitor, + HelpCircle, + Info +} from 'lucide-react' +import { serverApi } from '@/lib/api' + +// Helper to get version from environment +const getAppVersion = (): string => { + try { + return (import.meta.env as Record).VITE_APP_VERSION || 'unknown' + } catch { + return 'unknown' + } +} + +interface BackendInfoDialogProps { + open: boolean + onOpenChange: (open: boolean) => void +} + +interface BackendInfo { + version: string + devices: string + help: string +} + +type BackendType = 'llama-cpp' | 'mlx' | 'vllm' + +const BACKEND_OPTIONS = [ + { value: 'llama-cpp', label: 'Llama.cpp' }, + { value: 'mlx', label: 'MLX' }, + { value: 'vllm', label: 'vLLM' }, +] as const + +const BackendInfoDialog: React.FC = ({ + open, + onOpenChange +}) => { + const [selectedBackend, setSelectedBackend] = useState('llama-cpp') + const [backendInfo, setBackendInfo] = useState(null) + const [loading, setLoading] = useState(false) + const [error, setError] = useState(null) + const [showHelp, setShowHelp] = useState(false) + + // Fetch backend info + const fetchBackendInfo = async (backend: BackendType) => { + if (backend !== 'llama-cpp') { + setBackendInfo(null) + setError(null) + return + } + + setLoading(true) + setError(null) + + try { + const [version, devices, help] = await Promise.all([ + serverApi.getVersion(), + serverApi.getDevices(), + serverApi.getHelp() + ]) + + setBackendInfo({ version, devices, help }) + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to fetch backend info') + } finally { + setLoading(false) + } + } + + // Load data when dialog opens or backend changes + useEffect(() => { + if (open) { + fetchBackendInfo(selectedBackend) + } + }, [open, selectedBackend]) + + const handleBackendChange = (value: string) => { + setSelectedBackend(value as BackendType) + setShowHelp(false) // Reset help section when switching backends + } + + const renderBackendContent = () => { + if (selectedBackend !== 'llama-cpp') { + return ( +
+
+ +
+

Backend Info Not Available

+

+ Information for the {BACKEND_OPTIONS.find(b => b.value === selectedBackend)?.label} backend is not yet implemented. +

+
+
+
+ ) + } + + if (loading && !backendInfo) { + return ( +
+ + Loading backend information... +
+ ) + } + + if (error) { + return ( +
+ + {error} +
+ ) + } + + if (!backendInfo) { + return null + } + + return ( +
+ {/* Llamactl Version Section */} +
+

Llamactl Version

+ +
+
+              {getAppVersion()}
+            
+
+
+ + {/* Backend Version Section */} +
+

+ {BACKEND_OPTIONS.find(b => b.value === selectedBackend)?.label} Version +

+ +
+
+ $ llama-server --version +
+
+              {backendInfo.version}
+            
+
+
+ + {/* Devices Section */} +
+
+

Available Devices

+
+ +
+
+ $ llama-server --list-devices +
+
+              {backendInfo.devices}
+            
+
+
+ + {/* Help Section */} +
+ + + {showHelp && ( +
+
+ $ llama-server --help +
+
+                {backendInfo.help}
+              
+
+ )} +
+
+ ) + } + + return ( + + + +
+
+ + + Backend Information + + + View backend-specific environment and capabilities + +
+ +
+
+ handleBackendChange(value || 'llama-cpp')} + options={BACKEND_OPTIONS} + className="text-sm" + /> +
+ + {selectedBackend === 'llama-cpp' && ( + + )} +
+
+
+ +
+ {renderBackendContent()} +
+ + + + +
+
+ ) +} + +export default BackendInfoDialog \ No newline at end of file diff --git a/webui/src/components/SystemInfoDialog.tsx b/webui/src/components/SystemInfoDialog.tsx deleted file mode 100644 index 8eb03f5..0000000 --- a/webui/src/components/SystemInfoDialog.tsx +++ /dev/null @@ -1,203 +0,0 @@ -import React, { useState, useEffect } from 'react' -import { Button } from '@/components/ui/button' -import { - Dialog, - DialogContent, - DialogDescription, - DialogFooter, - DialogHeader, - DialogTitle, -} from '@/components/ui/dialog' -import { - RefreshCw, - AlertCircle, - Loader2, - ChevronDown, - ChevronRight, - Monitor, - HelpCircle -} from 'lucide-react' -import { serverApi } from '@/lib/api' - -// Helper to get version from environment -const getAppVersion = (): string => { - try { - return (import.meta.env as Record).VITE_APP_VERSION || 'unknown' - } catch { - return 'unknown' - } -} - -interface SystemInfoModalProps { - open: boolean - onOpenChange: (open: boolean) => void -} - -interface SystemInfo { - version: string - devices: string - help: string -} - -const SystemInfoDialog: React.FC = ({ - open, - onOpenChange -}) => { - const [systemInfo, setSystemInfo] = useState(null) - const [loading, setLoading] = useState(false) - const [error, setError] = useState(null) - const [showHelp, setShowHelp] = useState(false) - - // Fetch system info - const fetchSystemInfo = async () => { - setLoading(true) - setError(null) - - try { - const [version, devices, help] = await Promise.all([ - serverApi.getVersion(), - serverApi.getDevices(), - serverApi.getHelp() - ]) - - setSystemInfo({ version, devices, help }) - } catch (err) { - setError(err instanceof Error ? err.message : 'Failed to fetch system info') - } finally { - setLoading(false) - } - } - - // Load data when dialog opens - useEffect(() => { - if (open) { - fetchSystemInfo() - } - }, [open]) - - return ( - - - -
-
- - - System Information - - - Llama.cpp server environment and capabilities - -
- - -
-
- -
- {loading && !systemInfo ? ( -
- - Loading system information... -
- ) : error ? ( -
- - {error} -
- ) : systemInfo ? ( -
- {/* Llamactl Version Section */} -
-

Llamactl Version

- -
-
-                    {getAppVersion()}
-                  
-
-
- - {/* Llama Server Version Section */} -
-

Llama Server Version

- -
-
- $ llama-server --version -
-
-                    {systemInfo.version}
-                  
-
-
- - {/* Devices Section */} -
-
-

Available Devices

-
- -
-
- $ llama-server --list-devices -
-
-                    {systemInfo.devices}
-                  
-
-
- - {/* Help Section */} -
- - - {showHelp && ( -
-
- $ llama-server --help -
-
-                      {systemInfo.help}
-                    
-
- )} -
-
- ) : null} -
- - - - -
-
- ) -} - -export default SystemInfoDialog \ No newline at end of file diff --git a/webui/src/lib/api.ts b/webui/src/lib/api.ts index be9ab80..5bd7991 100644 --- a/webui/src/lib/api.ts +++ b/webui/src/lib/api.ts @@ -156,5 +156,5 @@ export const instancesApi = { }, // GET /instances/{name}/proxy/health - getHealth: (name: string) => apiCall(`/instances/${name}/proxy/health`), + getHealth: (name: string) => apiCall>(`/instances/${name}/proxy/health`), }; From e3bf8ac05adea22bcee1385b384a02f956aff8e0 Mon Sep 17 00:00:00 2001 From: LordMathis Date: Tue, 23 Sep 2025 22:05:31 +0200 Subject: [PATCH 4/4] Update SystemInfo dialog --- webui/src/App.tsx | 16 +- webui/src/components/ParseCommandDialog.tsx | 6 +- ...endInfoDialog.tsx => SystemInfoDialog.tsx} | 140 +++++++++--------- .../instance/BackendConfigurationCard.tsx | 6 +- 4 files changed, 84 insertions(+), 84 deletions(-) rename webui/src/components/{BackendInfoDialog.tsx => SystemInfoDialog.tsx} (66%) diff --git a/webui/src/App.tsx b/webui/src/App.tsx index b58e54f..04c8c01 100644 --- a/webui/src/App.tsx +++ b/webui/src/App.tsx @@ -3,7 +3,7 @@ import Header from "@/components/Header"; import InstanceList from "@/components/InstanceList"; import InstanceDialog from "@/components/InstanceDialog"; import LoginDialog from "@/components/LoginDialog"; -import BackendInfoDialog from "./components/BackendInfoDialog"; +import SystemInfoDialog from "./components/SystemInfoDialog"; import { type CreateInstanceOptions, type Instance } from "@/types/instance"; import { useInstances } from "@/contexts/InstancesContext"; import { useAuth } from "@/contexts/AuthContext"; @@ -13,7 +13,7 @@ import { Toaster } from "sonner"; function App() { const { isAuthenticated, isLoading: authLoading } = useAuth(); const [isInstanceModalOpen, setIsInstanceModalOpen] = useState(false); - const [isBackendInfoModalOpen, setIsBackendInfoModalOpen] = useState(false); + const [isSystemInfoModalOpen, setIsSystemInfoModalOpen] = useState(false); const [editingInstance, setEditingInstance] = useState( undefined ); @@ -37,8 +37,8 @@ function App() { } }; - const handleShowBackendInfo = () => { - setIsBackendInfoModalOpen(true); + const handleShowSystemInfo = () => { + setIsSystemInfoModalOpen(true); }; // Show loading spinner while checking auth @@ -70,7 +70,7 @@ function App() { return (
-
+
@@ -82,9 +82,9 @@ function App() { instance={editingInstance} /> - diff --git a/webui/src/components/ParseCommandDialog.tsx b/webui/src/components/ParseCommandDialog.tsx index 5043a57..ba5075d 100644 --- a/webui/src/components/ParseCommandDialog.tsx +++ b/webui/src/components/ParseCommandDialog.tsx @@ -105,9 +105,9 @@ const ParseCommandDialog: React.FC = ({
diff --git a/webui/src/components/BackendInfoDialog.tsx b/webui/src/components/SystemInfoDialog.tsx similarity index 66% rename from webui/src/components/BackendInfoDialog.tsx rename to webui/src/components/SystemInfoDialog.tsx index 9ce0881..f39d874 100644 --- a/webui/src/components/BackendInfoDialog.tsx +++ b/webui/src/components/SystemInfoDialog.tsx @@ -20,6 +20,7 @@ import { Info } from 'lucide-react' import { serverApi } from '@/lib/api' +import { BackendType, type BackendTypeValue } from '@/types/instance' // Helper to get version from environment const getAppVersion = (): string => { @@ -30,7 +31,7 @@ const getAppVersion = (): string => { } } -interface BackendInfoDialogProps { +interface SystemInfoDialogProps { open: boolean onOpenChange: (open: boolean) => void } @@ -41,27 +42,25 @@ interface BackendInfo { help: string } -type BackendType = 'llama-cpp' | 'mlx' | 'vllm' - const BACKEND_OPTIONS = [ - { value: 'llama-cpp', label: 'Llama.cpp' }, - { value: 'mlx', label: 'MLX' }, - { value: 'vllm', label: 'vLLM' }, -] as const + { value: BackendType.LLAMA_CPP, label: 'Llama Server' }, + { value: BackendType.MLX_LM, label: 'MLX LM' }, + { value: BackendType.VLLM, label: 'vLLM' }, +] -const BackendInfoDialog: React.FC = ({ +const SystemInfoDialog: React.FC = ({ open, onOpenChange }) => { - const [selectedBackend, setSelectedBackend] = useState('llama-cpp') + const [selectedBackend, setSelectedBackend] = useState(BackendType.LLAMA_CPP) const [backendInfo, setBackendInfo] = useState(null) const [loading, setLoading] = useState(false) const [error, setError] = useState(null) const [showHelp, setShowHelp] = useState(false) // Fetch backend info - const fetchBackendInfo = async (backend: BackendType) => { - if (backend !== 'llama-cpp') { + const fetchBackendInfo = async (backend: BackendTypeValue) => { + if (backend !== BackendType.LLAMA_CPP) { setBackendInfo(null) setError(null) return @@ -88,21 +87,21 @@ const BackendInfoDialog: React.FC = ({ // Load data when dialog opens or backend changes useEffect(() => { if (open) { - fetchBackendInfo(selectedBackend) + void fetchBackendInfo(selectedBackend) } }, [open, selectedBackend]) const handleBackendChange = (value: string) => { - setSelectedBackend(value as BackendType) + setSelectedBackend(value as BackendTypeValue) setShowHelp(false) // Reset help section when switching backends } - const renderBackendContent = () => { - if (selectedBackend !== 'llama-cpp') { + const renderBackendSpecificContent = () => { + if (selectedBackend !== BackendType.LLAMA_CPP) { return ( -
+
- +

Backend Info Not Available

@@ -116,7 +115,7 @@ const BackendInfoDialog: React.FC = ({ if (loading && !backendInfo) { return ( -

+
Loading backend information...
@@ -138,17 +137,6 @@ const BackendInfoDialog: React.FC = ({ return (
- {/* Llamactl Version Section */} -
-

Llamactl Version

- -
-
-              {getAppVersion()}
-            
-
-
- {/* Backend Version Section */}

@@ -216,49 +204,61 @@ const BackendInfoDialog: React.FC = ({ -
-
- - - Backend Information - - - View backend-specific environment and capabilities - -
- -
-
- handleBackendChange(value || 'llama-cpp')} - options={BACKEND_OPTIONS} - className="text-sm" - /> -
- - {selectedBackend === 'llama-cpp' && ( - - )} -
-
+ + + System Information + + + View system and backend-specific environment and capabilities +
- {renderBackendContent()} +
+ {/* Llamactl Version Section - Always shown */} +
+

Llamactl Version

+
+
+                  {getAppVersion()}
+                
+
+
+ + {/* Backend Selection Section */} +
+

Backend Information

+
+
+ handleBackendChange(value || BackendType.LLAMA_CPP)} + options={BACKEND_OPTIONS} + className="text-sm" + /> +
+ {selectedBackend === BackendType.LLAMA_CPP && ( + + )} +
+
+ + {/* Backend-specific content */} + {renderBackendSpecificContent()} +
@@ -271,4 +271,4 @@ const BackendInfoDialog: React.FC = ({ ) } -export default BackendInfoDialog \ No newline at end of file +export default SystemInfoDialog \ No newline at end of file diff --git a/webui/src/components/instance/BackendConfigurationCard.tsx b/webui/src/components/instance/BackendConfigurationCard.tsx index 3e5e43f..5bf7c36 100644 --- a/webui/src/components/instance/BackendConfigurationCard.tsx +++ b/webui/src/components/instance/BackendConfigurationCard.tsx @@ -37,9 +37,9 @@ const BackendConfigurationCard: React.FC = ({ value={formData.backend_type || BackendType.LLAMA_CPP} onChange={(value) => onChange('backend_type', value)} options={[ - { value: BackendType.LLAMA_CPP, label: 'Llama Server (llama_cpp)' }, - { value: BackendType.MLX_LM, label: 'MLX LM (mlx_lm)' }, - { value: BackendType.VLLM, label: 'vLLM (vllm)' } + { value: BackendType.LLAMA_CPP, label: 'Llama Server' }, + { value: BackendType.MLX_LM, label: 'MLX LM' }, + { value: BackendType.VLLM, label: 'vLLM' } ]} description="Select the backend server type" />
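
Usage note: with these patches applied, the llama.cpp utility endpoints that previously lived under /server/ are served from /backends/llama-cpp/ beneath the /api/v1 base path declared in swagger.json, and all three return plain text. The TypeScript sketch below shows one way a script could exercise the relocated routes; the endpoint paths come from the patches above, while the host, port, API key value, and the exact header used for the ApiKeyAuth scheme are placeholder assumptions, not part of this change.

// Sketch: fetch the relocated llama.cpp endpoints over HTTP (Node 18+ or any fetch-capable runtime).
// Assumptions: llamactl is reachable at localhost:8080, the base path is /api/v1 (per swagger.json),
// and the ApiKeyAuth key is accepted as a Bearer token -- adjust BASE_URL and the header to match
// your deployment.
const BASE_URL = "http://localhost:8080/api/v1"; // assumed host/port
const API_KEY = "<your-api-key>";                // placeholder

async function getText(path: string): Promise<string> {
  const res = await fetch(`${BASE_URL}${path}`, {
    headers: { Authorization: `Bearer ${API_KEY}` },
  });
  if (!res.ok) throw new Error(`GET ${path} failed: ${res.status}`);
  return res.text(); // all three endpoints respond with plain text
}

async function main(): Promise<void> {
  // Formerly /server/version, /server/devices, and /server/help.
  const [version, devices, help] = await Promise.all([
    getText("/backends/llama-cpp/version"),
    getText("/backends/llama-cpp/devices"),
    getText("/backends/llama-cpp/help"),
  ]);
  console.log(version);
  console.log(devices);
  console.log(help.split("\n").slice(0, 5).join("\n")); // help output is long; show a preview
}

main().catch(console.error);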