A proxy server for vLLM. The steps below set up a local development environment, start a mock vLLM backend, run the proxy, and run the test suite.
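For orientation, here is a minimal sketch of what a proxy app served as `src.app.main:app` could look like, assuming a FastAPI app that relays OpenAI-style chat completion requests to a vLLM backend. The `VLLM_BASE_URL` variable and the route path are illustrative assumptions, not the project's actual code.

```python
# Hypothetical sketch of src/app/main.py: forward OpenAI-style requests to a vLLM backend.
# VLLM_BASE_URL and the route path are assumptions for illustration only.
import os

import httpx
from fastapi import FastAPI, Request, Response

VLLM_BASE_URL = os.getenv("VLLM_BASE_URL", "http://localhost:8001")

app = FastAPI(title="vllm-proxy")


@app.post("/v1/chat/completions")
async def chat_completions(request: Request) -> Response:
    # Relay the request body unchanged and return the backend's response.
    payload = await request.body()
    async with httpx.AsyncClient(base_url=VLLM_BASE_URL, timeout=60.0) as client:
        upstream = await client.post(
            "/v1/chat/completions",
            content=payload,
            headers={"Content-Type": "application/json"},
        )
    return Response(
        content=upstream.content,
        status_code=upstream.status_code,
        media_type="application/json",
    )
```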
```bash
# Create virtual environment using specific Python version
poetry env use 3.11.12

# Activate virtual environment
source .venv/bin/activate

# Install development dependencies
poetry install
```
```bash
# Run local mock vLLM
cd docker/local
docker compose -f docker-compose.local.yml up -d
```
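For context, the mock vLLM backend that the compose file starts could be as simple as a service returning canned OpenAI-style responses. The sketch below is an illustrative assumption, not the contents of `docker/local`.

```python
# Hypothetical mock vLLM backend: returns a canned OpenAI-style chat completion.
# Illustrates the kind of service docker-compose.local.yml might start; not the project's actual mock.
from fastapi import FastAPI

app = FastAPI(title="mock-vllm")


@app.post("/v1/chat/completions")
async def chat_completions(body: dict) -> dict:
    # Echo back a fixed assistant message regardless of the prompt.
    return {
        "id": "chatcmpl-mock",
        "object": "chat.completion",
        "model": body.get("model", "mock-model"),
        "choices": [
            {
                "index": 0,
                "message": {"role": "assistant", "content": "Hello from the mock backend."},
                "finish_reason": "stop",
            }
        ],
        "usage": {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
    }
```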
```bash
# Run vllm-proxy server locally
cd ../..
uvicorn src.app.main:app --host 0.0.0.0 --reload
```
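Once the proxy is running (uvicorn listens on port 8000 by default), a quick smoke test might look like the following. The model name is a placeholder and should match whatever the mock backend expects.

```python
# Quick smoke test against the locally running proxy (uvicorn's default port is 8000).
# The model name is a placeholder; adjust it for your backend.
import httpx

response = httpx.post(
    "http://localhost:8000/v1/chat/completions",
    json={
        "model": "mock-model",
        "messages": [{"role": "user", "content": "ping"}],
    },
    timeout=30.0,
)
response.raise_for_status()
print(response.json())
```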
```bash
# Run all tests
pytest tests

# Run a specific test file
pytest tests/app/test_openai.py
```
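As a rough idea of the shape a test such as `tests/app/test_openai.py` could take, the sketch below exercises the app with FastAPI's `TestClient`. The route, payload, and assertions are assumptions; a real test would depend on the proxy's implementation and would either mock the upstream backend or rely on the local mock vLLM from the earlier step.

```python
# Illustrative test shape; assumes the mock vLLM backend (or a mocked upstream) is available.
from fastapi.testclient import TestClient

from src.app.main import app

client = TestClient(app)


def test_chat_completions_returns_json():
    response = client.post(
        "/v1/chat/completions",
        json={"model": "mock-model", "messages": [{"role": "user", "content": "ping"}]},
    )
    assert response.status_code == 200
    assert "choices" in response.json()
```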