@@ -24,9 +24,9 @@ uv pip install -e .
uv pip install vllm --torch-backend=auto
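# Note: --torch-backend=auto lets uv pick a torch build matching the detected CUDA driver.
# A specific backend can be pinned instead (illustrative value, not required by this repo; adjust to your CUDA version):
# uv pip install vllm --torch-backend=cu128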
# To resolve vllm version-compatibility issues, first check the currently installed versions
-pip list | grep -E "(vllm|transformers)"
+pip list | grep -E "(vllm|transformers|torch|flash_attn|flashinfer)"
# Upgrade to the latest mutually compatible versions
-pip install --upgrade vllm transformers torch
+pip install --upgrade vllm transformers torch flash_attn flashinfer
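# Optional sanity check after upgrading (assumption: `python` resolves to the same environment that was just upgraded):
# confirm the upgraded packages import together without version conflicts
python -c "import torch, transformers, vllm; print(torch.__version__, transformers.__version__, vllm.__version__)"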
# Check which process is occupying the port
sudo lsof -i:8101
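# If a stale server still holds the port, stop it using the PID reported by lsof (verify the PID before killing):
# kill <PID>
# Alternative check without lsof (assumes iproute2's ss is installed)
ss -ltnp | grep 8101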