We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent baeb002 commit fc79606 — Copy full SHA for fc79606
src/axolotl/cli/main.py
@@ -28,7 +28,6 @@
28
fetch_from_github,
29
filter_none_kwargs,
30
)
31
-from axolotl.cli.vllm_serve import do_vllm_serve
32
from axolotl.integrations.lm_eval.cli import lm_eval
33
from axolotl.utils import set_pytorch_cuda_alloc_conf
34
from axolotl.utils.schemas.config import AxolotlInputConfig
@@ -327,6 +326,8 @@ def fetch(directory: str, dest: Optional[str]) -> None:
327
326
@add_options_from_dataclass(VllmServeCliArgs)
@filter_none_kwargs
def vllm_serve(config: str, **cli_args: VllmServeCliArgs):
    """Launch a vLLM server for the model described by *config*.

    Any extra CLI options (``VllmServeCliArgs`` fields) are collected into
    ``cli_args`` and forwarded verbatim to the serving entry point.
    """
    # Deferred import: loading ``do_vllm_serve`` at module import time would
    # pull in the heavy vLLM dependency for every CLI invocation, not just
    # this subcommand — so bind it lazily here instead.
    from axolotl.cli.vllm_serve import do_vllm_serve

    launch = do_vllm_serve
    launch(config, cli_args)
332
333
0 commit comments