Mirror of https://github.com/lordmathis/llamactl.git
Add vLLM backend support to webui
@@ -5,6 +5,7 @@ export { type CreateInstanceOptions } from '@/schemas/instanceOptions'
 export const BackendType = {
   LLAMA_CPP: 'llama_cpp',
   MLX_LM: 'mlx_lm',
+  VLLM: 'vllm',
   // MLX_VLM: 'mlx_vlm', // Future expansion
 } as const
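
For context, a minimal sketch of how a const object like this is typically consumed in a TypeScript webui: a string-literal union is derived from its values and used to narrow backend identifiers coming from the API. The BackendTypeValue and isBackendType names below are illustrative assumptions, not taken from the llamactl codebase.

// Sketch only: illustrative helper names, assuming the webui derives a
// union type from the BackendType const shown in the diff above.
export const BackendType = {
  LLAMA_CPP: 'llama_cpp',
  MLX_LM: 'mlx_lm',
  VLLM: 'vllm',
} as const

// Union of the backend string values: 'llama_cpp' | 'mlx_lm' | 'vllm'
export type BackendTypeValue = (typeof BackendType)[keyof typeof BackendType]

// Narrow an arbitrary string (e.g. from an API response) to a known backend.
export function isBackendType(value: string): value is BackendTypeValue {
  return (Object.values(BackendType) as string[]).includes(value)
}

// Usage:
//   isBackendType('vllm')    // true
//   isBackendType('unknown') // false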