Mirror of https://github.com/lordmathis/llamactl.git (synced 2025-11-06 00:54:23 +00:00)

Commit: Deployed 12bbf34 to dev with MkDocs 1.5.3 and mike 2.0.0

Binary file not shown.
@@ -854,46 +854,49 @@
(full configuration example on the configuration page; this commit adds an `environment: {}` line under each backend)

  llama-cpp:
    command: "llama-server"
    args: []
    environment: {}                  # Environment variables for the backend process
    docker:
      enabled: false
      image: "ghcr.io/ggml-org/llama.cpp:server"
      args: ["run", "--rm", "--network", "host", "--gpus", "all"]
      environment: {}

  vllm:
    command: "vllm"
    args: ["serve"]
    environment: {}                  # Environment variables for the backend process
    docker:
      enabled: false
      image: "vllm/vllm-openai:latest"
      args: ["run", "--rm", "--network", "host", "--gpus", "all", "--shm-size", "1g"]
      environment: {}

  mlx:
    command: "mlx_lm.server"
    args: []
    environment: {}                  # Environment variables for the backend process

instances:
  port_range: [8000, 9000]                        # Port range for instances
  data_dir: ~/.local/share/llamactl               # Data directory (platform-specific, see below)
  configs_dir: ~/.local/share/llamactl/instances  # Instance configs directory
  logs_dir: ~/.local/share/llamactl/logs          # Logs directory
  auto_create_dirs: true                          # Auto-create data/config/logs dirs if missing
  max_instances: -1                               # Max instances (-1 = unlimited)
  max_running_instances: -1                       # Max running instances (-1 = unlimited)
  enable_lru_eviction: true                       # Enable LRU eviction for idle instances
  default_auto_restart: true                      # Auto-restart new instances by default
  default_max_restarts: 3                         # Max restarts for new instances
  default_restart_delay: 5                        # Restart delay (seconds) for new instances
  default_on_demand_start: true                   # Default on-demand start setting
  on_demand_start_timeout: 120                    # Default on-demand start timeout in seconds
  timeout_check_interval: 5                       # Idle instance timeout check in minutes

auth:
  require_inference_auth: true                    # Require auth for inference endpoints
  inference_keys: []                              # Keys for inference endpoints
  require_management_auth: true                   # Require auth for management endpoints
  management_keys: []                             # Keys for management endpoints

## Configuration Files

### Configuration File Locations
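The `auth` block above drives two separate key lists, and the API examples later in this diff show that keys are passed as bearer tokens. A minimal sketch of how the two kinds of keys are used (key values, instance name, and host are placeholders):

# Sketch: management endpoints take a management key,
# OpenAI-compatible inference endpoints take an inference key
# (key values, instance name, and host are placeholders).
curl -H "Authorization: Bearer your-management-key" \
  http://localhost:8080/api/v1/instances/my-model

curl -X POST http://localhost:8080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-inference-key" \
  -d '{"model": "my-model", "messages": [{"role": "user", "content": "Hello!"}]}'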
@@ -930,34 +933,59 @@
(per-backend configuration reference; this commit adds `environment: {}` to each backend)

  llama-cpp:
    command: "llama-server"
    args: []
    environment: {}  # Environment variables for the backend process
    docker:
      enabled: false  # Enable Docker runtime (default: false)
      image: "ghcr.io/ggml-org/llama.cpp:server"
      args: ["run", "--rm", "--network", "host", "--gpus", "all"]
      environment: {}

  vllm:
    command: "vllm"
    args: ["serve"]
    environment: {}  # Environment variables for the backend process
    docker:
      enabled: false
      image: "vllm/vllm-openai:latest"
      args: ["run", "--rm", "--network", "host", "--gpus", "all", "--shm-size", "1g"]
      environment: {}

  mlx:
    command: "mlx_lm.server"
    args: []
    environment: {}  # Environment variables for the backend process
    # MLX does not support Docker
**Backend Configuration Fields:**

- `command`: Executable name/path for the backend
- `args`: Default arguments prepended to all instances
- `environment`: Environment variables for the backend process (optional; new in this commit)
- `docker`: Docker-specific configuration (optional)
- `enabled`: Boolean flag to enable Docker runtime
- `image`: Docker image to use
- `args`: Additional arguments passed to `docker run`
- `environment`: Environment variables for the container (optional)
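To give a sense of what the Docker fields amount to, here is a rough sketch of the kind of invocation the `docker.image` and `docker.args` values from the example above would produce. This is an assumption for illustration only: the exact command llamactl composes is not shown in this diff, and the trailing llama-server flags and model path are made up.

# Rough illustration only: with docker.enabled true, the configured
# docker args and image presumably end up in a command along these lines.
# The exact composition is not part of this diff; --model/--port values
# are placeholders.
docker run --rm --network host --gpus all \
  ghcr.io/ggml-org/llama.cpp:server \
  --model /models/llama-2-7b.gguf --port 8081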
**Environment Variables:** (section added in this commit)

**LlamaCpp Backend:**

- `LLAMACTL_LLAMACPP_COMMAND` - LlamaCpp executable command
- `LLAMACTL_LLAMACPP_ARGS` - Space-separated default arguments
- `LLAMACTL_LLAMACPP_ENV` - Environment variables in format "KEY1=value1,KEY2=value2"
- `LLAMACTL_LLAMACPP_DOCKER_ENABLED` - Enable Docker runtime (true/false)
- `LLAMACTL_LLAMACPP_DOCKER_IMAGE` - Docker image to use
- `LLAMACTL_LLAMACPP_DOCKER_ARGS` - Space-separated Docker arguments
- `LLAMACTL_LLAMACPP_DOCKER_ENV` - Docker environment variables in format "KEY1=value1,KEY2=value2"

**VLLM Backend:**

- `LLAMACTL_VLLM_COMMAND` - VLLM executable command
- `LLAMACTL_VLLM_ARGS` - Space-separated default arguments
- `LLAMACTL_VLLM_ENV` - Environment variables in format "KEY1=value1,KEY2=value2"
- `LLAMACTL_VLLM_DOCKER_ENABLED` - Enable Docker runtime (true/false)
- `LLAMACTL_VLLM_DOCKER_IMAGE` - Docker image to use
- `LLAMACTL_VLLM_DOCKER_ARGS` - Space-separated Docker arguments
- `LLAMACTL_VLLM_DOCKER_ENV` - Docker environment variables in format "KEY1=value1,KEY2=value2"

**MLX Backend:**

- `LLAMACTL_MLX_COMMAND` - MLX executable command
- `LLAMACTL_MLX_ARGS` - Space-separated default arguments
- `LLAMACTL_MLX_ENV` - Environment variables in format "KEY1=value1,KEY2=value2"
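These variables are read from the environment that launches llamactl. A minimal sketch, assuming the server is started with a `llamactl` binary (the binary name and the values below are illustrative, not taken from this diff):

# Sketch: override llama.cpp backend settings via environment variables
# before starting llamactl (binary name and values are placeholders).
export LLAMACTL_LLAMACPP_COMMAND="llama-server"
export LLAMACTL_LLAMACPP_ENV="CUDA_VISIBLE_DEVICES=0,OMP_NUM_THREADS=8"
export LLAMACTL_LLAMACPP_DOCKER_ENABLED=false
llamactl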
<h3 id="instance-configuration">Instance Configuration<a class="headerlink" href="#instance-configuration" title="Permanent link">¶</a></h3>
|
<h3 id="instance-configuration">Instance Configuration<a class="headerlink" href="#instance-configuration" title="Permanent link">¶</a></h3>
|
||||||
<div class="highlight"><pre><span></span><code><a id="__codelineno-4-1" name="__codelineno-4-1" href="#__codelineno-4-1"></a><span class="nt">instances</span><span class="p">:</span>
|
<div class="highlight"><pre><span></span><code><a id="__codelineno-4-1" name="__codelineno-4-1" href="#__codelineno-4-1"></a><span class="nt">instances</span><span class="p">:</span>
|
||||||
<a id="__codelineno-4-2" name="__codelineno-4-2" href="#__codelineno-4-2"></a><span class="w"> </span><span class="nt">port_range</span><span class="p">:</span><span class="w"> </span><span class="p p-Indicator">[</span><span class="nv">8000</span><span class="p p-Indicator">,</span><span class="w"> </span><span class="nv">9000</span><span class="p p-Indicator">]</span><span class="w"> </span><span class="c1"># Port range for instances (default: [8000, 9000])</span>
|
<a id="__codelineno-4-2" name="__codelineno-4-2" href="#__codelineno-4-2"></a><span class="w"> </span><span class="nt">port_range</span><span class="p">:</span><span class="w"> </span><span class="p p-Indicator">[</span><span class="nv">8000</span><span class="p p-Indicator">,</span><span class="w"> </span><span class="nv">9000</span><span class="p p-Indicator">]</span><span class="w"> </span><span class="c1"># Port range for instances (default: [8000, 9000])</span>
|
||||||
@@ -1027,7 +1055,7 @@
(page footer: the "Last update" date changes from September 24, 2025 to September 28, 2025)
@@ -850,6 +850,7 @@
(index page feature list; the Environment Variables bullet is new in this commit)

- **Instance Monitoring**: Health checks, auto-restart, log management
- **Smart Resource Management**: Idle timeout, LRU eviction, and configurable instance limits
- **Environment Variables**: Set custom environment variables per instance for advanced configuration

![Dashboard Screenshot](images/dashboard.png)

## Quick Links
File diff suppressed because one or more lines are too long
@@ -2,37 +2,37 @@
(sitemap: every `<lastmod>` changes from 2025-09-27 to 2025-09-28; `<changefreq>` stays daily for all entries)

<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
  https://llamactl.org/dev/
  https://llamactl.org/dev/getting-started/configuration/
  https://llamactl.org/dev/getting-started/installation/
  https://llamactl.org/dev/getting-started/quick-start/
  https://llamactl.org/dev/user-guide/api-reference/
  https://llamactl.org/dev/user-guide/managing-instances/
  https://llamactl.org/dev/user-guide/troubleshooting/
</urlset>
Binary file not shown.
@@ -1445,7 +1445,18 @@
Create and start a new instance.

POST /api/v1/instances/{name}

**Request Body:** JSON object with instance configuration. Common fields include:

- `backend_type`: Backend type (`llama_cpp`, `mlx_lm`, or `vllm`)
- `backend_options`: Backend-specific configuration
- `auto_restart`: Enable automatic restart on failure
- `max_restarts`: Maximum restart attempts
- `restart_delay`: Delay between restarts in seconds
- `on_demand_start`: Start instance when receiving requests
- `idle_timeout`: Idle timeout in minutes
- `environment`: Environment variables as key-value pairs

See [Managing Instances](../managing-instances/) for complete configuration options.

(this field list replaces the previous one-line pointer to Managing Instances)
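To make the field list concrete, a sketch of a create request that sets the lifecycle options. The endpoint and headers match the examples later in this diff; the instance name, model path, and values are placeholders:

# Sketch: create an instance with restart and on-demand options
# (instance name, model path, and values are placeholders).
curl -X POST http://localhost:8080/api/v1/instances/my-model \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-api-key" \
  -d '{
    "backend_type": "llama_cpp",
    "backend_options": {"model": "/models/llama-2-7b.gguf"},
    "auto_restart": true,
    "max_restarts": 3,
    "restart_delay": 5,
    "on_demand_start": true,
    "idle_timeout": 30
  }'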
**Response:**

{
  "name": "llama2-7b",
@@ -1605,36 +1616,44 @@
(curl examples updated: the create-instance body now uses `backend_type`/`backend_options`/`environment`; previously it was just `"model": "/models/llama-2-7b.gguf"`)

  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-api-key" \
  -d '{
    "backend_type": "llama_cpp",
    "backend_options": {
      "model": "/models/llama-2-7b.gguf",
      "gpu_layers": 32
    },
    "environment": {
      "CUDA_VISIBLE_DEVICES": "0",
      "OMP_NUM_THREADS": "8"
    }
  }'

# Check instance status
curl -H "Authorization: Bearer your-api-key" \
  http://localhost:8080/api/v1/instances/my-model

# Get instance logs
curl -H "Authorization: Bearer your-api-key" \
  "http://localhost:8080/api/v1/instances/my-model/logs?lines=50"

# Use OpenAI-compatible chat completions
curl -X POST http://localhost:8080/v1/chat/completions \
  -H "Content-Type: application/json" \
  -H "Authorization: Bearer your-inference-api-key" \
  -d '{
    "model": "my-model",
    "messages": [
      {"role": "user", "content": "Hello!"}
    ],
    "max_tokens": 100
  }'

# Stop instance
curl -X POST -H "Authorization: Bearer your-api-key" \
  http://localhost:8080/api/v1/instances/my-model/stop
|
||||||
<a id="__codelineno-31-33" name="__codelineno-31-33" href="#__codelineno-31-33"></a><span class="c1"># Delete instance</span>
|
<a id="__codelineno-31-33" name="__codelineno-31-33" href="#__codelineno-31-33"></a><span class="s1"> ],</span>
|
||||||
<a id="__codelineno-31-34" name="__codelineno-31-34" href="#__codelineno-31-34"></a>curl<span class="w"> </span>-X<span class="w"> </span>DELETE<span class="w"> </span>-H<span class="w"> </span><span class="s2">"Authorization: Bearer your-api-key"</span><span class="w"> </span><span class="se">\</span>
|
<a id="__codelineno-31-34" name="__codelineno-31-34" href="#__codelineno-31-34"></a><span class="s1"> "max_tokens": 100</span>
|
||||||
<a id="__codelineno-31-35" name="__codelineno-31-35" href="#__codelineno-31-35"></a><span class="w"> </span>http://localhost:8080/api/v1/instances/my-model
|
<a id="__codelineno-31-35" name="__codelineno-31-35" href="#__codelineno-31-35"></a><span class="s1"> }'</span>
|
||||||
|
<a id="__codelineno-31-36" name="__codelineno-31-36" href="#__codelineno-31-36"></a>
|
||||||
|
<a id="__codelineno-31-37" name="__codelineno-31-37" href="#__codelineno-31-37"></a><span class="c1"># Stop instance</span>
|
||||||
|
<a id="__codelineno-31-38" name="__codelineno-31-38" href="#__codelineno-31-38"></a>curl<span class="w"> </span>-X<span class="w"> </span>POST<span class="w"> </span>-H<span class="w"> </span><span class="s2">"Authorization: Bearer your-api-key"</span><span class="w"> </span><span class="se">\</span>
|
||||||
|
<a id="__codelineno-31-39" name="__codelineno-31-39" href="#__codelineno-31-39"></a><span class="w"> </span>http://localhost:8080/api/v1/instances/my-model/stop
|
||||||
|
<a id="__codelineno-31-40" name="__codelineno-31-40" href="#__codelineno-31-40"></a>
|
||||||
|
<a id="__codelineno-31-41" name="__codelineno-31-41" href="#__codelineno-31-41"></a><span class="c1"># Delete instance</span>
|
||||||
|
<a id="__codelineno-31-42" name="__codelineno-31-42" href="#__codelineno-31-42"></a>curl<span class="w"> </span>-X<span class="w"> </span>DELETE<span class="w"> </span>-H<span class="w"> </span><span class="s2">"Authorization: Bearer your-api-key"</span><span class="w"> </span><span class="se">\</span>
|
||||||
|
<a id="__codelineno-31-43" name="__codelineno-31-43" href="#__codelineno-31-43"></a><span class="w"> </span>http://localhost:8080/api/v1/instances/my-model
|
||||||
</code></pre></div>
|
</code></pre></div>
|
||||||
<h3 id="using-the-proxy-endpoint">Using the Proxy Endpoint<a class="headerlink" href="#using-the-proxy-endpoint" title="Permanent link">¶</a></h3>
|
<h3 id="using-the-proxy-endpoint">Using the Proxy Endpoint<a class="headerlink" href="#using-the-proxy-endpoint" title="Permanent link">¶</a></h3>
|
||||||
<p>You can also directly proxy requests to the llama-server instance:</p>
|
<p>You can also directly proxy requests to the llama-server instance:</p>
|
||||||
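The proxy example itself is not visible in this hunk, so here is a minimal sketch of what such a request could look like. The exact forwarding path is an assumption (shown here as a `/proxy/` segment under the instance's API path); the forwarded call uses llama-server's `/completion` endpoint.

```bash
# Minimal sketch: proxy a raw llama-server request through llamactl.
# The "/proxy/" path segment is an assumption for illustration only;
# consult the full page for the exact route.
curl -X POST \
  -H "Authorization: Bearer your-api-key" \
  -H "Content-Type: application/json" \
  -d '{"prompt": "Hello", "n_predict": 32}' \
  "http://localhost:8080/api/v1/instances/my-model/proxy/completion"
```

Note that, as in the examples above, the management routes under `/api/v1/instances` use the management API key, while the OpenAI-compatible `/v1/chat/completions` route uses the separate inference API key.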
@@ -1739,7 +1758,7 @@
 <span class="md-icon" title="Last update">
 <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M21 13.1c-.1 0-.3.1-.4.2l-1 1 2.1 2.1 1-1c.2-.2.2-.6 0-.8l-1.3-1.3c-.1-.1-.2-.2-.4-.2m-1.9 1.8-6.1 6V23h2.1l6.1-6.1-2.1-2M12.5 7v5.2l4 2.4-1 1L11 13V7h1.5M11 21.9c-5.1-.5-9-4.8-9-9.9C2 6.5 6.5 2 12 2c5.3 0 9.6 4.1 10 9.3-.3-.1-.6-.2-1-.2s-.7.1-1 .2C19.6 7.2 16.2 4 12 4c-4.4 0-8 3.6-8 8 0 4.1 3.1 7.5 7.1 7.9l-.1.2v1.8Z"/></svg>
 </span>
-<span class="git-revision-date-localized-plugin git-revision-date-localized-plugin-date">September 22, 2025</span>
+<span class="git-revision-date-localized-plugin git-revision-date-localized-plugin-date">September 28, 2025</span>
 </span>
@@ -1277,6 +1277,7 @@
 <li><strong>Restart Delay</strong>: Delay in seconds between restart attempts</li>
 <li><strong>On Demand Start</strong>: Start instance when receiving a request to the OpenAI compatible endpoint</li>
 <li><strong>Idle Timeout</strong>: Minutes before stopping idle instance (set to 0 to disable)</li>
+<li><strong>Environment Variables</strong>: Set custom environment variables for the instance process</li>
 </ul>
 </li>
 <li>Configure backend-specific options:<ul>
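For readers driving llamactl through the REST API rather than the Web UI, the options in the list above correspond to top-level fields of the instance create request. The sketch below is illustrative only: `auto_restart`, `on_demand_start`, and `environment` appear verbatim in the examples on this page, while the `restart_delay` and `idle_timeout` field names are assumptions.

```bash
# Minimal sketch: the Web UI options above expressed as request fields.
# "restart_delay" and "idle_timeout" are assumed names for illustration;
# the other fields appear in this page's own examples.
curl -X POST http://localhost:8080/api/v1/instances/my-model \
  -H "Authorization: Bearer your-api-key" \
  -H "Content-Type: application/json" \
  -d '{
    "backend_type": "llama_cpp",
    "backend_options": {
      "hf_repo": "unsloth/gemma-3-27b-it-GGUF",
      "hf_file": "gemma-3-27b-it-GGUF.gguf",
      "gpu_layers": 32
    },
    "auto_restart": true,
    "restart_delay": 5,
    "idle_timeout": 30,
    "on_demand_start": true,
    "environment": {
      "CUDA_VISIBLE_DEVICES": "0"
    }
  }'
```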
@@ -1327,20 +1328,25 @@
     "gpu_memory_utilization": 0.9
   },
   "auto_restart": true,
-  "on_demand_start": true
+  "on_demand_start": true,
+  "environment": {
+    "CUDA_VISIBLE_DEVICES": "0,1",
+    "NCCL_DEBUG": "INFO",
+    "PYTHONPATH": "/custom/path"
+  }
   }'

 # Create llama.cpp instance with HuggingFace model
 curl -X POST http://localhost:8080/api/instances/gemma-3-27b \
   -H "Content-Type: application/json" \
   -d '{
     "backend_type": "llama_cpp",
     "backend_options": {
       "hf_repo": "unsloth/gemma-3-27b-it-GGUF",
       "hf_file": "gemma-3-27b-it-GGUF.gguf",
       "gpu_layers": 32
     }
   }'
</code></pre></div>
<h2 id="start-instance">Start Instance<a class="headerlink" href="#start-instance" title="Permanent link">¶</a></h2>
<h3 id="via-web-ui_1">Via Web UI<a class="headerlink" href="#via-web-ui_1" title="Permanent link">¶</a></h3>
@@ -1444,7 +1450,7 @@
 <span class="md-icon" title="Last update">
 <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M21 13.1c-.1 0-.3.1-.4.2l-1 1 2.1 2.1 1-1c.2-.2.2-.6 0-.8l-1.3-1.3c-.1-.1-.2-.2-.4-.2m-1.9 1.8-6.1 6V23h2.1l6.1-6.1-2.1-2M12.5 7v5.2l4 2.4-1 1L11 13V7h1.5M11 21.9c-5.1-.5-9-4.8-9-9.9C2 6.5 6.5 2 12 2c5.3 0 9.6 4.1 10 9.3-.3-.1-.6-.2-1-.2s-.7.1-1 .2C19.6 7.2 16.2 4 12 4c-4.4 0-8 3.6-8 8 0 4.1 3.1 7.5 7.1 7.9l-.1.2v1.8Z"/></svg>
 </span>
-<span class="git-revision-date-localized-plugin git-revision-date-localized-plugin-date">September 21, 2025</span>
+<span class="git-revision-date-localized-plugin git-revision-date-localized-plugin-date">September 28, 2025</span>
 </span>