Skip to content
This repository was archived by the owner on Jun 5, 2025. It is now read-only.

Commit 0ed1365

Browse files
authored
Run the integration tests in parallel (#887)
Signed-off-by: Radoslav Dimitrov <[email protected]>
1 parent 6fe5ed1 commit 0ed1365

File tree

1 file changed

+67
-79
lines changed

1 file changed

+67
-79
lines changed

.github/workflows/integration-tests.yml

Lines changed: 67 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -18,14 +18,23 @@ on:
1818

1919
jobs:
2020
integration-tests:
21-
name: Run
21+
name: Test
2222
runs-on: ubuntu-latest
2323
strategy:
24+
fail-fast: false # Continue running other tests if one fails
2425
matrix:
2526
python-version: [ "3.12" ]
27+
test-provider: [ "copilot", "openai", "anthropic", "ollama", "vllm", "llamacpp" ]
2628
env:
27-
CONTAINER_NAME: "codegate"
28-
CERT_FILE: "/app/codegate_volume/certs/ca.crt"
29+
ENV_COPILOT_KEY: ${{ secrets.copilot-key }}
30+
ENV_OPENAI_KEY: ${{ secrets.copilot-key }} # We use the same key for OpenAI as the Copilot tests
31+
ENV_ANTHROPIC_KEY: ${{ secrets.anthropic-key }}
32+
CA_CERT_FILE: "/home/runner/work/codegate/codegate/codegate_volume/certs/ca.crt"
33+
CODEGATE_CONTAINER_NAME: "codegate"
34+
CODEGATE_MOUNT_PATH_CERT_FILE: "/app/codegate_volume/certs/ca.crt"
35+
CODEGATE_LOG_LEVEL: "DEBUG"
36+
LOCAL_OLLAMA_URL: "http://localhost:11434"
37+
LOCAL_VLLM_URL: "http://localhost:8000"
2938
steps:
3039
- name: Checkout
3140
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -40,55 +49,55 @@ jobs:
4049
mkdir -p ./codegate_volume/certs ./codegate_volume/models ./codegate_volume/db
4150
chmod -R 777 ./codegate_volume
4251
43-
- name: Download Docker image artifact
52+
- name: Download the CodeGate container image
4453
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4
4554
with:
4655
name: ${{ inputs.artifact-name }}
4756

48-
- name: Load Docker image
57+
- name: Load the CodeGate container image
4958
run: |
5059
docker load -i image.tar
5160
echo "Loaded image:"
5261
docker images
5362
54-
- name: Download the Qwen2.5-Coder-0.5B-Instruct-GGUF model
63+
- name: Download the Qwen2.5-Coder-0.5B-Instruct-GGUF model (llamacpp only)
64+
if: ${{ matrix.test-provider == 'llamacpp' }} # This is only needed for llamacpp
5565
run: |
56-
# This is needed for the llamacpp integration tests
5766
wget -P ./codegate_volume/models https://huggingface.co/Qwen/Qwen2.5-Coder-0.5B-Instruct-GGUF/resolve/main/qwen2.5-coder-0.5b-instruct-q5_k_m.gguf
5867
59-
- name: Run container from the loaded image
68+
- name: Start the CodeGate container
6069
run: |
6170
# Get the image name
6271
DOCKER_IMAGE=$(docker images --format "{{.Repository}}:{{.Tag}}" | head -n 1)
6372
echo "Running container from image: $DOCKER_IMAGE"
6473
6574
# Run the container
66-
docker run --name $CONTAINER_NAME -d --network host \
75+
docker run --name $CODEGATE_CONTAINER_NAME -d --network host \
6776
-v "$(pwd)"/codegate_volume:/app/codegate_volume \
68-
-e CODEGATE_APP_LOG_LEVEL=DEBUG \
69-
-e CODEGATE_OLLAMA_URL=http://localhost:11434 \
70-
-e CODEGATE_VLLM_URL=http://localhost:8000 \
77+
-e CODEGATE_APP_LOG_LEVEL=$CODEGATE_LOG_LEVEL \
78+
-e CODEGATE_OLLAMA_URL=$LOCAL_OLLAMA_URL \
79+
-e CODEGATE_VLLM_URL=$LOCAL_VLLM_URL \
7180
--restart unless-stopped $DOCKER_IMAGE
7281
7382
# Confirm the container started
7483
echo "Container started:"
7584
docker ps
7685
7786
# Verify container is running with correct ports
78-
docker ps -f name=$CONTAINER_NAME
87+
docker ps -f name=$CODEGATE_CONTAINER_NAME
7988
8089
# Check mount configuration
81-
docker inspect $CONTAINER_NAME -f '{{ json .Mounts }}' | jq
90+
docker inspect $CODEGATE_CONTAINER_NAME -f '{{ json .Mounts }}' | jq
8291
8392
- name: Ensure certificates are available in the container
8493
timeout-minutes: 4
8594
run: |
8695
# Wait for the cert file to be available in the container
8796
while true; do
88-
echo "Checking for $CERT_FILE in container $CONTAINER_NAME..."
97+
echo "Checking for $CODEGATE_MOUNT_PATH_CERT_FILE in container $CODEGATE_CONTAINER_NAME..."
8998
90-
if docker exec "$CONTAINER_NAME" test -f "$CERT_FILE"; then
91-
echo "Cert file found: $CERT_FILE"
99+
if docker exec "$CODEGATE_CONTAINER_NAME" test -f "$CODEGATE_MOUNT_PATH_CERT_FILE"; then
100+
echo "Cert file found: $CODEGATE_MOUNT_PATH_CERT_FILE"
92101
break
93102
else
94103
echo "Cert file not found. Retrying in 5 seconds..."
@@ -97,12 +106,9 @@ jobs:
97106
done
98107
99108
# Verify volume contents are accessible
100-
docker exec $CONTAINER_NAME ls -la /app/codegate_volume
109+
docker exec $CODEGATE_CONTAINER_NAME ls -la /app/codegate_volume
101110
102-
# Print the container logs we got so far
103-
docker logs $CONTAINER_NAME
104-
105-
- name: Install the CodeGate certificate
111+
- name: Copy and install the CodeGate certificate
106112
run: |
107113
docker cp codegate:/app/codegate_volume/certs/ca.crt ./codegate.crt
108114
sudo cp ./codegate.crt /usr/local/share/ca-certificates/codegate.crt
@@ -130,29 +136,8 @@ jobs:
130136
- name: Install dependencies
131137
run: poetry install --with dev
132138

133-
- name: Run integration tests - Copilot
134-
env:
135-
CODEGATE_PROVIDERS: "copilot"
136-
CA_CERT_FILE: "/home/runner/work/codegate/codegate/codegate_volume/certs/ca.crt"
137-
ENV_COPILOT_KEY: ${{ secrets.copilot-key }}
138-
run: |
139-
poetry run python tests/integration/integration_tests.py
140-
141-
- name: Run integration tests - OpenAI
142-
env:
143-
CODEGATE_PROVIDERS: "openai"
144-
ENV_OPENAI_KEY: ${{ secrets.copilot-key }} # We use the same key for OpenAI as the Copilot tests
145-
run: |
146-
poetry run python tests/integration/integration_tests.py
147-
148-
- name: Run integration tests - Anthropic
149-
env:
150-
CODEGATE_PROVIDERS: "anthropic"
151-
ENV_ANTHROPIC_KEY: ${{ secrets.anthropic-key }}
152-
run: |
153-
poetry run python tests/integration/integration_tests.py
154-
155-
- name: Run Ollama
139+
- name: Run the Ollama container (ollama-only)
140+
if: ${{ matrix.test-provider == 'ollama' }} # This is only needed for Ollama
156141
run: |
157142
docker run -d -v ollama:/root/.ollama --network host --name ollama ollama/ollama
158143
docker ps -f name=ollama
@@ -162,10 +147,28 @@ jobs:
162147
sleep 2
163148
done
164149
echo "Ollama is now available!"
150+
151+
# Run the model
165152
docker exec -d ollama ollama run qwen2.5-coder:0.5b
166153
167-
sleep 120 # Sleep for 2 minutes to allow Ollama to download the model. TODO: Improve this
168-
docker logs ollama
154+
echo "Waiting for model to be ready..."
155+
while true; do
156+
# Try to make a test query to the model
157+
response=$(curl -s http://localhost:11434/api/generate -d '{
158+
"model": "qwen2.5-coder:0.5b",
159+
"prompt": "Why is the sky blue?",
160+
"stream": false
161+
}' 2>&1)
162+
163+
# Check if the response contains an error
164+
if echo "$response" | grep -q "error"; then
165+
echo "Model not ready yet. Retrying in 5 seconds..."
166+
sleep 5
167+
else
168+
echo "Model is ready!"
169+
break
170+
fi
171+
done
169172
170173
# Verify the Ollama API is working
171174
curl http://localhost:11434/api/generate -d '{
@@ -174,30 +177,17 @@ jobs:
174177
"stream": false
175178
}'
176179
177-
docker logs ollama
178-
179-
- name: Run integration tests - Ollama
180-
env:
181-
CODEGATE_PROVIDERS: "ollama"
182-
run: |
183-
poetry run python tests/integration/integration_tests.py
184-
185-
- name: Print the Ollama container logs (useful for debugging)
186-
if: always()
187-
run: |
188-
docker logs ollama
189-
190-
- name: Build and run the vllm container
180+
- name: Build and run the vllm container (vllm-only)
181+
if: ${{ matrix.test-provider == 'vllm' }} # This is only needed for VLLM
191182
run: |
183+
# We clone the VLLM repo and build the container because the CPU-mode container is not published
192184
git clone https://github.com/vllm-project/vllm.git
193185
cd vllm
194186
docker build -f Dockerfile.cpu -t vllm-cpu-env --shm-size=4g .
195187
docker run -d --name vllm \
196188
--network="host" \
197189
vllm-cpu-env --model Qwen/Qwen2.5-Coder-0.5B-Instruct
198190
199-
- name: Verify the vllm container is running
200-
run: |
201191
echo -e "\nVerify the vllm container is serving\n"
202192
docker ps -f name=vllm
203193
@@ -230,37 +220,35 @@ jobs:
230220
"extra_body": {}
231221
}'
232222
233-
# Print a new line and then the message in a single echo
234223
echo -e "\nPrint the vllm container logs\n"
235224
docker logs vllm
236225
237-
- name: Run integration tests - vllm
238-
env:
239-
CODEGATE_PROVIDERS: "vllm"
240-
run: |
241-
poetry run python tests/integration/integration_tests.py
242-
243-
- name: Run integration tests - llamacpp
226+
- name: Tests - ${{ matrix.test-provider }}
244227
env:
245-
CODEGATE_PROVIDERS: "llamacpp"
228+
CODEGATE_PROVIDERS: ${{ matrix.test-provider }}
246229
run: |
247230
poetry run python tests/integration/integration_tests.py
248231
249-
- name: Print the CodeGate container logs (useful for debugging)
232+
- name: Print the CodeGate container logs
250233
if: always()
251234
run: |
252-
docker logs $CONTAINER_NAME
235+
docker logs $CODEGATE_CONTAINER_NAME
253236
echo "Models contents:"
254237
ls -la codegate_volume/models
255-
docker exec $CONTAINER_NAME ls -la /app/codegate_volume/models
238+
docker exec $CODEGATE_CONTAINER_NAME ls -la /app/codegate_volume/models
256239
echo "Certs contents:"
257240
ls -la codegate_volume/certs
258-
docker exec $CONTAINER_NAME ls -la /app/codegate_volume/certs
241+
docker exec $CODEGATE_CONTAINER_NAME ls -la /app/codegate_volume/certs
259242
echo "DB contents:"
260243
ls -la codegate_volume/db
261-
docker exec $CONTAINER_NAME ls -la /app/codegate_volume/db
244+
docker exec $CODEGATE_CONTAINER_NAME ls -la /app/codegate_volume/db
262245
263-
- name: Print the vllm container logs (useful for debugging)
264-
if: always()
246+
- name: Print the vllm container logs (vllm-only)
247+
if: ${{ matrix.test-provider == 'vllm' }} # This is only needed for VLLM
265248
run: |
266249
docker logs vllm
250+
251+
- name: Print the Ollama container logs (ollama-only)
252+
if: ${{ matrix.test-provider == 'ollama' }} # This is only needed for Ollama
253+
run: |
254+
docker logs ollama

0 commit comments

Comments (0)