@@ -62,7 +62,7 @@ jobs:
     runs-on: [single-gpu, nvidia-gpu, t4, ci]
     container:
       image: diffusers/diffusers-pytorch-cuda
-      options: --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/ --gpus 0 --privileged
+      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --gpus 0 --privileged
     steps:
     - name: Checkout diffusers
       uses: actions/checkout@v3
@@ -119,7 +119,7 @@ jobs:
     runs-on: docker-gpu
     container:
       image: diffusers/diffusers-pytorch-cuda
-      options: --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/ --gpus 0
+      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --gpus 0
     defaults:
       run:
         shell: bash
@@ -171,7 +171,7 @@ jobs:
     runs-on: docker-gpu
     container:
       image: diffusers/diffusers-pytorch-cuda
-      options: --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/ --gpus 0
+      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --gpus 0
     defaults:
       run:
         shell: bash
@@ -221,7 +221,7 @@ jobs:
     runs-on: docker-tpu
     container:
       image: diffusers/diffusers-flax-tpu
-      options: --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/ --privileged
+      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --privileged
     defaults:
       run:
         shell: bash
@@ -268,7 +268,7 @@ jobs:
     runs-on: docker-gpu
     container:
       image: diffusers/diffusers-onnxruntime-cuda
-      options: --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/ --gpus 0
+      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --gpus 0
     defaults:
       run:
         shell: bash
@@ -317,7 +317,7 @@ jobs:

     container:
       image: diffusers/diffusers-pytorch-compile-cuda
-      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/
+      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/

     steps:
     - name: Checkout diffusers
@@ -358,7 +358,7 @@ jobs:

     container:
       image: diffusers/diffusers-pytorch-xformers-cuda
-      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/
+      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/

     steps:
     - name: Checkout diffusers
@@ -399,7 +399,7 @@ jobs:

     container:
       image: diffusers/diffusers-pytorch-cuda
-      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/hf_cache:/mnt/cache/
+      options: --gpus 0 --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/

     steps:
     - name: Checkout diffusers
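Every hunk makes the same substitution: the host-side source of the Hugging Face cache bind mount changes from /mnt/hf_cache to /mnt/cache/.cache/huggingface, while the in-container mount point /mnt/cache/ and the remaining container options are untouched. As a minimal sketch of one affected container block after the change (assembled from the hunks above; not the full workflow file):

    container:
      image: diffusers/diffusers-pytorch-cuda
      # image and trailing flags (--gpus 0 / --privileged) differ per job; see the hunks above
      options: --shm-size "16gb" --ipc host -v /mnt/cache/.cache/huggingface:/mnt/cache/ --gpus 0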