Update swagger docs

2025-10-26 16:36:24 +01:00
parent eac4f834c0
commit 3ff87f24bd
3 changed files with 323 additions and 323 deletions


@@ -19,7 +19,7 @@ const docTemplate = `{
     "host": "{{.Host}}",
     "basePath": "{{.BasePath}}",
     "paths": {
-        "/backends/llama-cpp/devices": {
+        "/api/v1/backends/llama-cpp/devices": {
             "get": {
                 "security": [
                     {
@@ -47,7 +47,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/backends/llama-cpp/help": {
+        "/api/v1/backends/llama-cpp/help": {
             "get": {
                 "security": [
                     {
@@ -75,7 +75,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/backends/llama-cpp/parse-command": {
+        "/api/v1/backends/llama-cpp/parse-command": {
             "post": {
                 "security": [
                     {
@@ -132,7 +132,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/backends/llama-cpp/version": {
+        "/api/v1/backends/llama-cpp/version": {
             "get": {
                 "security": [
                     {
@@ -160,7 +160,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/backends/mlx/parse-command": {
+        "/api/v1/backends/mlx/parse-command": {
             "post": {
                 "security": [
                     {
@@ -208,7 +208,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/backends/vllm/parse-command": {
+        "/api/v1/backends/vllm/parse-command": {
             "post": {
                 "security": [
                     {
@@ -256,7 +256,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances": {
+        "/api/v1/instances": {
             "get": {
                 "security": [
                     {
@@ -287,7 +287,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}": {
+        "/api/v1/instances/{name}": {
             "get": {
                 "security": [
                     {
@@ -474,7 +474,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}/logs": {
+        "/api/v1/instances/{name}/logs": {
             "get": {
                 "security": [
                     {
@@ -523,7 +523,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}/proxy": {
+        "/api/v1/instances/{name}/proxy": {
             "get": {
                 "security": [
                     {
@@ -613,7 +613,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}/restart": {
+        "/api/v1/instances/{name}/restart": {
             "post": {
                 "security": [
                     {
@@ -656,7 +656,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}/start": {
+        "/api/v1/instances/{name}/start": {
             "post": {
                 "security": [
                     {
@@ -699,7 +699,7 @@ const docTemplate = `{
                 }
             }
         },
-        "/instances/{name}/stop": {
+        "/api/v1/instances/{name}/stop": {
             "post": {
                 "security": [
                     {
@@ -742,6 +742,114 @@ const docTemplate = `{
                 }
             }
         },
+        "/api/v1/nodes": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns a map of all nodes configured in the server (node name -\u003e node config)",
+                "tags": [
+                    "nodes"
+                ],
+                "summary": "List all configured nodes",
+                "responses": {
+                    "200": {
+                        "description": "Map of nodes",
+                        "schema": {
+                            "type": "object",
+                            "additionalProperties": {
+                                "$ref": "#/definitions/server.NodeResponse"
+                            }
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
+        "/api/v1/nodes/{name}": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns the details of a specific node by name",
+                "tags": [
+                    "nodes"
+                ],
+                "summary": "Get details of a specific node",
+                "parameters": [
+                    {
+                        "type": "string",
+                        "description": "Node Name",
+                        "name": "name",
+                        "in": "path",
+                        "required": true
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "Node details",
+                        "schema": {
+                            "$ref": "#/definitions/server.NodeResponse"
+                        }
+                    },
+                    "400": {
+                        "description": "Invalid name format",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "404": {
+                        "description": "Node not found",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
+        "/api/v1/version": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns the version of the llamactl command",
+                "tags": [
+                    "version"
+                ],
+                "summary": "Get llamactl version",
+                "responses": {
+                    "200": {
+                        "description": "Version information",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
         "/llama-cpp/{name}/": {
             "get": {
                 "security": [
@@ -1303,86 +1411,6 @@ const docTemplate = `{
                 }
             }
         },
-        "/nodes": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns a map of all nodes configured in the server (node name -\u003e node config)",
-                "tags": [
-                    "nodes"
-                ],
-                "summary": "List all configured nodes",
-                "responses": {
-                    "200": {
-                        "description": "Map of nodes",
-                        "schema": {
-                            "type": "object",
-                            "additionalProperties": {
-                                "$ref": "#/definitions/server.NodeResponse"
-                            }
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
-        },
-        "/nodes/{name}": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns the details of a specific node by name",
-                "tags": [
-                    "nodes"
-                ],
-                "summary": "Get details of a specific node",
-                "parameters": [
-                    {
-                        "type": "string",
-                        "description": "Node Name",
-                        "name": "name",
-                        "in": "path",
-                        "required": true
-                    }
-                ],
-                "responses": {
-                    "200": {
-                        "description": "Node details",
-                        "schema": {
-                            "$ref": "#/definitions/server.NodeResponse"
-                        }
-                    },
-                    "400": {
-                        "description": "Invalid name format",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "404": {
-                        "description": "Node not found",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
-        },
         "/v1/": {
             "post": {
                 "security": [
@@ -1444,34 +1472,6 @@ const docTemplate = `{
                     }
                 }
             }
-        },
-        "/version": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns the version of the llamactl command",
-                "tags": [
-                    "version"
-                ],
-                "summary": "Get llamactl version",
-                "responses": {
-                    "200": {
-                        "description": "Version information",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
         }
     },
     "definitions": {


@@ -12,7 +12,7 @@
     },
     "basePath": "/api/v1",
     "paths": {
-        "/backends/llama-cpp/devices": {
+        "/api/v1/backends/llama-cpp/devices": {
             "get": {
                 "security": [
                     {
@@ -40,7 +40,7 @@
                 }
             }
         },
-        "/backends/llama-cpp/help": {
+        "/api/v1/backends/llama-cpp/help": {
             "get": {
                 "security": [
                     {
@@ -68,7 +68,7 @@
                 }
             }
         },
-        "/backends/llama-cpp/parse-command": {
+        "/api/v1/backends/llama-cpp/parse-command": {
             "post": {
                 "security": [
                     {
@@ -125,7 +125,7 @@
                 }
             }
         },
-        "/backends/llama-cpp/version": {
+        "/api/v1/backends/llama-cpp/version": {
             "get": {
                 "security": [
                     {
@@ -153,7 +153,7 @@
                 }
             }
         },
-        "/backends/mlx/parse-command": {
+        "/api/v1/backends/mlx/parse-command": {
             "post": {
                 "security": [
                     {
@@ -201,7 +201,7 @@
                 }
             }
         },
-        "/backends/vllm/parse-command": {
+        "/api/v1/backends/vllm/parse-command": {
             "post": {
                 "security": [
                     {
@@ -249,7 +249,7 @@
                 }
             }
         },
-        "/instances": {
+        "/api/v1/instances": {
             "get": {
                 "security": [
                     {
@@ -280,7 +280,7 @@
                 }
             }
         },
-        "/instances/{name}": {
+        "/api/v1/instances/{name}": {
             "get": {
                 "security": [
                     {
@@ -467,7 +467,7 @@
                 }
             }
         },
-        "/instances/{name}/logs": {
+        "/api/v1/instances/{name}/logs": {
             "get": {
                 "security": [
                     {
@@ -516,7 +516,7 @@
                 }
             }
         },
-        "/instances/{name}/proxy": {
+        "/api/v1/instances/{name}/proxy": {
             "get": {
                 "security": [
                     {
@@ -606,7 +606,7 @@
                 }
             }
         },
-        "/instances/{name}/restart": {
+        "/api/v1/instances/{name}/restart": {
             "post": {
                 "security": [
                     {
@@ -649,7 +649,7 @@
                 }
             }
         },
-        "/instances/{name}/start": {
+        "/api/v1/instances/{name}/start": {
             "post": {
                 "security": [
                     {
@@ -692,7 +692,7 @@
                 }
             }
         },
-        "/instances/{name}/stop": {
+        "/api/v1/instances/{name}/stop": {
             "post": {
                 "security": [
                     {
@@ -735,6 +735,114 @@
                 }
             }
         },
+        "/api/v1/nodes": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns a map of all nodes configured in the server (node name -\u003e node config)",
+                "tags": [
+                    "nodes"
+                ],
+                "summary": "List all configured nodes",
+                "responses": {
+                    "200": {
+                        "description": "Map of nodes",
+                        "schema": {
+                            "type": "object",
+                            "additionalProperties": {
+                                "$ref": "#/definitions/server.NodeResponse"
+                            }
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
+        "/api/v1/nodes/{name}": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns the details of a specific node by name",
+                "tags": [
+                    "nodes"
+                ],
+                "summary": "Get details of a specific node",
+                "parameters": [
+                    {
+                        "type": "string",
+                        "description": "Node Name",
+                        "name": "name",
+                        "in": "path",
+                        "required": true
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "Node details",
+                        "schema": {
+                            "$ref": "#/definitions/server.NodeResponse"
+                        }
+                    },
+                    "400": {
+                        "description": "Invalid name format",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "404": {
+                        "description": "Node not found",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
+        "/api/v1/version": {
+            "get": {
+                "security": [
+                    {
+                        "ApiKeyAuth": []
+                    }
+                ],
+                "description": "Returns the version of the llamactl command",
+                "tags": [
+                    "version"
+                ],
+                "summary": "Get llamactl version",
+                "responses": {
+                    "200": {
+                        "description": "Version information",
+                        "schema": {
+                            "type": "string"
+                        }
+                    },
+                    "500": {
+                        "description": "Internal Server Error",
+                        "schema": {
+                            "type": "string"
+                        }
+                    }
+                }
+            }
+        },
         "/llama-cpp/{name}/": {
             "get": {
                 "security": [
@@ -1296,86 +1404,6 @@
                 }
             }
         },
-        "/nodes": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns a map of all nodes configured in the server (node name -\u003e node config)",
-                "tags": [
-                    "nodes"
-                ],
-                "summary": "List all configured nodes",
-                "responses": {
-                    "200": {
-                        "description": "Map of nodes",
-                        "schema": {
-                            "type": "object",
-                            "additionalProperties": {
-                                "$ref": "#/definitions/server.NodeResponse"
-                            }
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
-        },
-        "/nodes/{name}": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns the details of a specific node by name",
-                "tags": [
-                    "nodes"
-                ],
-                "summary": "Get details of a specific node",
-                "parameters": [
-                    {
-                        "type": "string",
-                        "description": "Node Name",
-                        "name": "name",
-                        "in": "path",
-                        "required": true
-                    }
-                ],
-                "responses": {
-                    "200": {
-                        "description": "Node details",
-                        "schema": {
-                            "$ref": "#/definitions/server.NodeResponse"
-                        }
-                    },
-                    "400": {
-                        "description": "Invalid name format",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "404": {
-                        "description": "Node not found",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
-        },
         "/v1/": {
             "post": {
                 "security": [
@@ -1437,34 +1465,6 @@
                     }
                 }
             }
-        },
-        "/version": {
-            "get": {
-                "security": [
-                    {
-                        "ApiKeyAuth": []
-                    }
-                ],
-                "description": "Returns the version of the llamactl command",
-                "tags": [
-                    "version"
-                ],
-                "summary": "Get llamactl version",
-                "responses": {
-                    "200": {
-                        "description": "Version information",
-                        "schema": {
-                            "type": "string"
-                        }
-                    },
-                    "500": {
-                        "description": "Internal Server Error",
-                        "schema": {
-                            "type": "string"
-                        }
-                    }
-                }
-            }
         }
     },
     "definitions": {


@@ -69,7 +69,7 @@ info:
   title: llamactl API
   version: "1.0"
 paths:
-  /backends/llama-cpp/devices:
+  /api/v1/backends/llama-cpp/devices:
     get:
       description: Returns a list of available devices for the llama server
       responses:
@@ -86,7 +86,7 @@ paths:
       summary: List available devices for llama server
       tags:
       - backends
-  /backends/llama-cpp/help:
+  /api/v1/backends/llama-cpp/help:
     get:
       description: Returns the help text for the llama server command
       responses:
@@ -103,7 +103,7 @@ paths:
       summary: Get help for llama server
       tags:
      - backends
-  /backends/llama-cpp/parse-command:
+  /api/v1/backends/llama-cpp/parse-command:
     post:
       consumes:
       - application/json
@@ -139,7 +139,7 @@ paths:
       summary: Parse llama-server command
       tags:
       - backends
-  /backends/llama-cpp/version:
+  /api/v1/backends/llama-cpp/version:
     get:
       description: Returns the version of the llama server command
       responses:
@@ -156,7 +156,7 @@ paths:
       summary: Get version of llama server
       tags:
       - backends
-  /backends/mlx/parse-command:
+  /api/v1/backends/mlx/parse-command:
     post:
       consumes:
       - application/json
@@ -186,7 +186,7 @@ paths:
       summary: Parse mlx_lm.server command
       tags:
       - backends
-  /backends/vllm/parse-command:
+  /api/v1/backends/vllm/parse-command:
     post:
       consumes:
       - application/json
@@ -216,7 +216,7 @@ paths:
       summary: Parse vllm serve command
       tags:
       - backends
-  /instances:
+  /api/v1/instances:
     get:
       description: Returns a list of all instances managed by the server
       responses:
@@ -235,7 +235,7 @@ paths:
       summary: List all instances
       tags:
       - instances
-  /instances/{name}:
+  /api/v1/instances/{name}:
     delete:
       description: Stops and removes a specific instance by name
       parameters:
@@ -354,7 +354,7 @@ paths:
       summary: Update an instance's configuration
       tags:
       - instances
-  /instances/{name}/logs:
+  /api/v1/instances/{name}/logs:
     get:
       description: Returns the logs from a specific instance by name with optional
         line limit
@@ -386,7 +386,7 @@ paths:
       summary: Get logs from a specific instance
       tags:
       - instances
-  /instances/{name}/proxy:
+  /api/v1/instances/{name}/proxy:
     get:
       description: Forwards HTTP requests to the llama-server instance running on
         a specific port
@@ -447,7 +447,7 @@ paths:
         if stopped
       tags:
       - instances
-  /instances/{name}/restart:
+  /api/v1/instances/{name}/restart:
     post:
       description: Restarts a specific instance by name
       parameters:
@@ -474,7 +474,7 @@ paths:
       summary: Restart a running instance
       tags:
       - instances
-  /instances/{name}/start:
+  /api/v1/instances/{name}/start:
     post:
       description: Starts a specific instance by name
       parameters:
@@ -501,7 +501,7 @@ paths:
       summary: Start a stopped instance
       tags:
       - instances
-  /instances/{name}/stop:
+  /api/v1/instances/{name}/stop:
     post:
       description: Stops a specific instance by name
       parameters:
@@ -528,6 +528,74 @@ paths:
       summary: Stop a running instance
       tags:
       - instances
+  /api/v1/nodes:
+    get:
+      description: Returns a map of all nodes configured in the server (node name
+        -> node config)
+      responses:
+        "200":
+          description: Map of nodes
+          schema:
+            additionalProperties:
+              $ref: '#/definitions/server.NodeResponse'
+            type: object
+        "500":
+          description: Internal Server Error
+          schema:
+            type: string
+      security:
+      - ApiKeyAuth: []
+      summary: List all configured nodes
+      tags:
+      - nodes
+  /api/v1/nodes/{name}:
+    get:
+      description: Returns the details of a specific node by name
+      parameters:
+      - description: Node Name
+        in: path
+        name: name
+        required: true
+        type: string
+      responses:
+        "200":
+          description: Node details
+          schema:
+            $ref: '#/definitions/server.NodeResponse'
+        "400":
+          description: Invalid name format
+          schema:
+            type: string
+        "404":
+          description: Node not found
+          schema:
+            type: string
+        "500":
+          description: Internal Server Error
+          schema:
+            type: string
+      security:
+      - ApiKeyAuth: []
+      summary: Get details of a specific node
+      tags:
+      - nodes
+  /api/v1/version:
+    get:
+      description: Returns the version of the llamactl command
+      responses:
+        "200":
+          description: Version information
+          schema:
+            type: string
+        "500":
+          description: Internal Server Error
+          schema:
+            type: string
+      security:
+      - ApiKeyAuth: []
+      summary: Get llamactl version
+      tags:
+      - version
   /llama-cpp/{name}/:
     get:
       description: Proxies requests to the llama.cpp UI for the specified instance
@@ -897,57 +965,6 @@ paths:
       summary: Proxy requests to llama.cpp server instance
       tags:
       - backends
-  /nodes:
-    get:
-      description: Returns a map of all nodes configured in the server (node name
-        -> node config)
-      responses:
-        "200":
-          description: Map of nodes
-          schema:
-            additionalProperties:
-              $ref: '#/definitions/server.NodeResponse'
-            type: object
-        "500":
-          description: Internal Server Error
-          schema:
-            type: string
-      security:
-      - ApiKeyAuth: []
-      summary: List all configured nodes
-      tags:
-      - nodes
-  /nodes/{name}:
-    get:
-      description: Returns the details of a specific node by name
-      parameters:
-      - description: Node Name
-        in: path
-        name: name
-        required: true
-        type: string
-      responses:
-        "200":
-          description: Node details
-          schema:
-            $ref: '#/definitions/server.NodeResponse'
-        "400":
-          description: Invalid name format
-          schema:
-            type: string
-        "404":
-          description: Node not found
-          schema:
-            type: string
-        "500":
-          description: Internal Server Error
-          schema:
-            type: string
-      security:
-      - ApiKeyAuth: []
-      summary: Get details of a specific node
-      tags:
-      - nodes
   /v1/:
     post:
       consumes:
@@ -989,23 +1006,6 @@ paths:
       summary: List instances in OpenAI-compatible format
       tags:
       - openai
-  /version:
-    get:
-      description: Returns the version of the llamactl command
-      responses:
-        "200":
-          description: Version information
-          schema:
-            type: string
-        "500":
-          description: Internal Server Error
-          schema:
-            type: string
-      security:
-      - ApiKeyAuth: []
-      summary: Get llamactl version
-      tags:
-      - version
 securityDefinitions:
   ApiKeyAuth:
     in: header