package llamactl

import (
	"github.com/go-chi/chi/v5"
	"github.com/go-chi/chi/v5/middleware"
	httpSwagger "github.com/swaggo/http-swagger"

	_ "llamactl/docs" // generated Swagger docs
)

// SetupRouter creates a new chi router and registers all llamactl routes on it.
func SetupRouter(handler *Handler) *chi.Mux {
	r := chi.NewRouter()

	r.Use(middleware.Logger)

	// Swagger UI, served from the generated doc.json
	r.Get("/swagger/*", httpSwagger.Handler(
		httpSwagger.URL("/swagger/doc.json"),
	))

	// Define routes
	r.Route("/api/v1", func(r chi.Router) {

		r.Route("/server", func(r chi.Router) {
			r.Get("/help", handler.HelpHandler())
			r.Get("/version", handler.VersionHandler())
			r.Get("/devices", handler.ListDevicesHandler())
		})

		// Instance management endpoints
		r.Route("/instances", func(r chi.Router) {
			r.Get("/", handler.ListInstances())   // List all instances
			r.Post("/", handler.CreateInstance()) // Create and start new instance

			r.Route("/{name}", func(r chi.Router) {
				// Instance management
				r.Get("/", handler.GetInstance())       // Get instance details
				r.Put("/", handler.UpdateInstance())    // Update instance configuration
				r.Delete("/", handler.DeleteInstance()) // Stop and remove instance

				r.Post("/start", handler.StartInstance())     // Start stopped instance
				r.Post("/stop", handler.StopInstance())       // Stop running instance
				r.Post("/restart", handler.RestartInstance()) // Restart instance
				// r.Get("/logs", handler.GetInstanceLogs())  // Get instance logs

				// Llama.cpp server proxy endpoints (proxied to the actual llama.cpp server)
				r.Route("/proxy", func(r chi.Router) {
					r.HandleFunc("/*", handler.ProxyToInstance()) // Proxy all llama.cpp server requests
				})
			})
		})
	})

	// OpenAI-compatible endpoints (model name in request body determines routing)
	// r.Post("/v1/completions", handler.OpenAICompletions())          // Route based on model name in request
	// r.Post("/v1/chat/completions", handler.OpenAIChatCompletions()) // Route based on model name in request
	// r.Post("/v1/embeddings", handler.OpenAIEmbeddings())            // Route based on model name in request (if supported)
	// r.Post("/v1/rerank", handler.OpenAIRerank())                    // Route based on model name in request (if supported)
	// r.Post("/v1/reranking", handler.OpenAIReranking())              // Route based on model name in request (if supported)

	return r
}