Skip to content

Commit 4efb4db

Browse files
authored
enable all gpus when running ci. (#12062)
1 parent 639fd12 commit 4efb4db

File tree

5 files changed: +12 additions, -12 deletions

.github/workflows/benchmark.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ jobs:
2525
group: aws-g6e-4xlarge
2626
container:
2727
image: diffusers/diffusers-pytorch-cuda
28-
options: --shm-size "16gb" --ipc host --gpus 0
28+
options: --shm-size "16gb" --ipc host --gpus all
2929
steps:
3030
- name: Checkout diffusers
3131
uses: actions/checkout@v3

.github/workflows/nightly_tests.yml

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@ jobs:
6161
group: aws-g4dn-2xlarge
6262
container:
6363
image: diffusers/diffusers-pytorch-cuda
64-
options: --shm-size "16gb" --ipc host --gpus 0
64+
options: --shm-size "16gb" --ipc host --gpus all
6565
steps:
6666
- name: Checkout diffusers
6767
uses: actions/checkout@v3
@@ -107,7 +107,7 @@ jobs:
107107
group: aws-g4dn-2xlarge
108108
container:
109109
image: diffusers/diffusers-pytorch-cuda
110-
options: --shm-size "16gb" --ipc host --gpus 0
110+
options: --shm-size "16gb" --ipc host --gpus all
111111
defaults:
112112
run:
113113
shell: bash
@@ -222,7 +222,7 @@ jobs:
222222
group: aws-g6e-xlarge-plus
223223
container:
224224
image: diffusers/diffusers-pytorch-cuda
225-
options: --shm-size "16gb" --ipc host --gpus 0
225+
options: --shm-size "16gb" --ipc host --gpus all
226226
steps:
227227
- name: Checkout diffusers
228228
uses: actions/checkout@v3
@@ -270,7 +270,7 @@ jobs:
270270
group: aws-g4dn-2xlarge
271271
container:
272272
image: diffusers/diffusers-pytorch-minimum-cuda
273-
options: --shm-size "16gb" --ipc host --gpus 0
273+
options: --shm-size "16gb" --ipc host --gpus all
274274
defaults:
275275
run:
276276
shell: bash

.github/workflows/pr_tests_gpu.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ jobs:
118118
group: aws-g4dn-2xlarge
119119
container:
120120
image: diffusers/diffusers-pytorch-cuda
121-
options: --shm-size "16gb" --ipc host --gpus 0
121+
options: --shm-size "16gb" --ipc host --gpus all
122122
steps:
123123
- name: Checkout diffusers
124124
uses: actions/checkout@v3
@@ -183,7 +183,7 @@ jobs:
183183
group: aws-g4dn-2xlarge
184184
container:
185185
image: diffusers/diffusers-pytorch-cuda
186-
options: --shm-size "16gb" --ipc host --gpus 0
186+
options: --shm-size "16gb" --ipc host --gpus all
187187
defaults:
188188
run:
189189
shell: bash

.github/workflows/push_tests.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,7 @@ jobs:
6464
group: aws-g4dn-2xlarge
6565
container:
6666
image: diffusers/diffusers-pytorch-cuda
67-
options: --shm-size "16gb" --ipc host --gpus 0
67+
options: --shm-size "16gb" --ipc host --gpus all
6868
steps:
6969
- name: Checkout diffusers
7070
uses: actions/checkout@v3
@@ -109,7 +109,7 @@ jobs:
109109
group: aws-g4dn-2xlarge
110110
container:
111111
image: diffusers/diffusers-pytorch-cuda
112-
options: --shm-size "16gb" --ipc host --gpus 0
112+
options: --shm-size "16gb" --ipc host --gpus all
113113
defaults:
114114
run:
115115
shell: bash

.github/workflows/release_tests_fast.yml

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,7 @@ jobs:
6262
group: aws-g4dn-2xlarge
6363
container:
6464
image: diffusers/diffusers-pytorch-cuda
65-
options: --shm-size "16gb" --ipc host --gpus 0
65+
options: --shm-size "16gb" --ipc host --gpus all
6666
steps:
6767
- name: Checkout diffusers
6868
uses: actions/checkout@v3
@@ -107,7 +107,7 @@ jobs:
107107
group: aws-g4dn-2xlarge
108108
container:
109109
image: diffusers/diffusers-pytorch-cuda
110-
options: --shm-size "16gb" --ipc host --gpus 0
110+
options: --shm-size "16gb" --ipc host --gpus all
111111
defaults:
112112
run:
113113
shell: bash
@@ -163,7 +163,7 @@ jobs:
163163
group: aws-g4dn-2xlarge
164164
container:
165165
image: diffusers/diffusers-pytorch-minimum-cuda
166-
options: --shm-size "16gb" --ipc host --gpus 0
166+
options: --shm-size "16gb" --ipc host --gpus all
167167
defaults:
168168
run:
169169
shell: bash

0 commit comments

Comments (0)