mirror of https://github.com/lordmathis/llamactl.git
synced 2025-12-23 01:24:24 +00:00
Initial API key store implementation
@@ -27,7 +27,7 @@ func SetupRouter(handler *Handler) *chi.Mux {
 	}))
 
 	// Add API authentication middleware
-	authMiddleware := NewAPIAuthMiddleware(handler.cfg.Auth)
+	authMiddleware := NewAPIAuthMiddleware(handler.cfg.Auth, handler.authStore)
 
 	if handler.cfg.Server.EnableSwagger {
 		r.Get("/swagger/*", httpSwagger.Handler(
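
The substance of this hunk is the second constructor argument: the auth middleware now carries the new key store alongside the static config, so keys created at runtime can be checked on each request. A minimal sketch of the shapes that implies follows; none of these type definitions appear in the diff, so every name and field here is an assumption:

package server

// Sketch only. AuthConfig, APIKeyStore, and their members are guesses made
// for illustration; llamactl's real definitions are not part of this diff.

type AuthConfig struct {
	RequireInferenceAuth bool // mirrors handler.cfg.Auth.RequireInferenceAuth used below
}

// APIKeyStore is an assumed interface for handler.authStore: the middleware
// only needs to ask whether a presented key is valid.
type APIKeyStore interface {
	Validate(key string) bool
}

type APIAuthMiddleware struct {
	cfg   AuthConfig
	store APIKeyStore
}

// NewAPIAuthMiddleware matches the new call site: static config plus the
// dynamic key store introduced by this commit.
func NewAPIAuthMiddleware(cfg AuthConfig, store APIKeyStore) *APIAuthMiddleware {
	return &APIAuthMiddleware{cfg: cfg, store: store}
}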
@@ -46,6 +46,17 @@ func SetupRouter(handler *Handler) *chi.Mux {
 
 	r.Get("/config", handler.ConfigHandler())
 
+	// API key management endpoints
+	r.Route("/auth", func(r chi.Router) {
+		r.Route("/keys", func(r chi.Router) {
+			r.Post("/", handler.CreateInferenceKey())                        // Create API key
+			r.Get("/", handler.ListInferenceKeys())                          // List API keys
+			r.Get("/{id}", handler.GetInferenceKey())                        // Get API key details
+			r.Delete("/{id}", handler.DeleteInferenceKey())                  // Delete API key
+			r.Get("/{id}/permissions", handler.GetInferenceKeyPermissions()) // Get key permissions
+		})
+	})
+
 	// Backend-specific endpoints
 	r.Route("/backends", func(r chi.Router) {
 		r.Route("/llama-cpp", func(r chi.Router) {
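
Taken together, the new /auth/keys subtree is a small CRUD surface for inference keys. Below is a sketch of driving it from a Go client; the JSON payload, the port, and the prefix these routes are mounted under are all assumptions, since the diff shows only the relative paths:

package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Placeholder base URL; the mount prefix is not visible in this hunk.
	base := "http://localhost:8080"

	// POST /auth/keys -> handler.CreateInferenceKey(). The body shape is an
	// assumed example, not llamactl's documented payload.
	resp, err := http.Post(base+"/auth/keys", "application/json",
		bytes.NewBufferString(`{"name":"ci-key"}`))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(out))

	// GET /auth/keys -> handler.ListInferenceKeys().
	list, err := http.Get(base + "/auth/keys")
	if err != nil {
		panic(err)
	}
	defer list.Body.Close()
	out, _ = io.ReadAll(list.Body)
	fmt.Println(list.Status, string(out))
}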
@@ -94,13 +105,13 @@ func SetupRouter(handler *Handler) *chi.Mux {
 		})
 	})
 
-	r.Route(("/v1"), func(r chi.Router) {
+	r.Route("/v1", func(r chi.Router) {
 
 		if authMiddleware != nil && handler.cfg.Auth.RequireInferenceAuth {
 			r.Use(authMiddleware.AuthMiddleware(KeyTypeInference))
 		}
 
-		r.Get(("/models"), handler.OpenAIListInstances()) // List instances in OpenAI-compatible format
+		r.Get("/models", handler.OpenAIListInstances()) // List instances in OpenAI-compatible format
 
 		// OpenAI-compatible proxy endpoint
 		// Handles all POST requests to /v1/*, including:
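
The /v1 subtree gates every OpenAI-compatible route behind AuthMiddleware(KeyTypeInference) when RequireInferenceAuth is set. The middleware body is not in this diff; continuing the sketch above, one plausible chi-compatible shape, with the bearer-token format and the KeyType definition as assumptions:

package server

import (
	"net/http"
	"strings"
)

// KeyType distinguishes key classes; only the inference variant is visible
// in this diff, so the enum here is a guess.
type KeyType int

const KeyTypeInference KeyType = iota

// AuthMiddleware returns a standard func(http.Handler) http.Handler, which
// is what chi's r.Use expects. The "Authorization: Bearer <key>" format is
// assumed; kt would select which key class to check, elided in this sketch.
func (m *APIAuthMiddleware) AuthMiddleware(kt KeyType) func(http.Handler) http.Handler {
	return func(next http.Handler) http.Handler {
		return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			key := strings.TrimPrefix(r.Header.Get("Authorization"), "Bearer ")
			if key == "" || !m.store.Validate(key) {
				http.Error(w, "unauthorized", http.StatusUnauthorized)
				return
			}
			next.ServeHTTP(w, r)
		})
	}
}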
@@ -128,7 +139,7 @@ func SetupRouter(handler *Handler) *chi.Mux {
 			r.Use(authMiddleware.AuthMiddleware(KeyTypeInference))
 		}
 
-		// This handler auto start the server if it's not running
+		// This handler auto starts the server if it's not running
 		llamaCppHandler := handler.LlamaCppProxy()
 
 		// llama.cpp server specific proxy endpoints
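
The comment fix above ("auto start" to "auto starts") is the only change in this hunk, but it names the interesting behavior: the proxy lazily boots the llama.cpp server. A sketch of that pattern under assumed types follows; LlamaCppProxy's real implementation is not shown here:

package server

import (
	"net/http"
	"net/http/httputil"
	"net/url"
)

// llamaServer is an assumed interface for the managed llama.cpp instance.
type llamaServer interface {
	Running() bool
	Start() error
	URL() *url.URL
}

// llamaCppProxy starts the backend on first use, then reverse-proxies the
// request to it — the "auto starts the server" behavior the comment names.
func llamaCppProxy(srv llamaServer) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if !srv.Running() {
			if err := srv.Start(); err != nil {
				http.Error(w, "failed to start llama.cpp server", http.StatusBadGateway)
				return
			}
		}
		httputil.NewSingleHostReverseProxy(srv.URL()).ServeHTTP(w, r)
	}
}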