File tree (Expand / Collapse): 5 files changed, +12 −12 lines changed
lines changed Original file line number Diff line number Diff line change 2525 group : aws-g6e-4xlarge
2626 container :
2727 image : diffusers/diffusers-pytorch-cuda
28- options : --shm-size "16gb" --ipc host --gpus 0
28+ options : --shm-size "16gb" --ipc host --gpus all
2929 steps :
3030 - name : Checkout diffusers
3131 uses : actions/checkout@v3
Original file line number Diff line number Diff line change 6161 group : aws-g4dn-2xlarge
6262 container :
6363 image : diffusers/diffusers-pytorch-cuda
64- options : --shm-size "16gb" --ipc host --gpus 0
64+ options : --shm-size "16gb" --ipc host --gpus all
6565 steps :
6666 - name : Checkout diffusers
6767 uses : actions/checkout@v3
@@ -107,7 +107,7 @@ jobs:
107107 group : aws-g4dn-2xlarge
108108 container :
109109 image : diffusers/diffusers-pytorch-cuda
110- options : --shm-size "16gb" --ipc host --gpus 0
110+ options : --shm-size "16gb" --ipc host --gpus all
111111 defaults :
112112 run :
113113 shell : bash
@@ -222,7 +222,7 @@ jobs:
222222 group : aws-g6e-xlarge-plus
223223 container :
224224 image : diffusers/diffusers-pytorch-cuda
225- options : --shm-size "16gb" --ipc host --gpus 0
225+ options : --shm-size "16gb" --ipc host --gpus all
226226 steps :
227227 - name : Checkout diffusers
228228 uses : actions/checkout@v3
@@ -270,7 +270,7 @@ jobs:
270270 group : aws-g4dn-2xlarge
271271 container :
272272 image : diffusers/diffusers-pytorch-minimum-cuda
273- options : --shm-size "16gb" --ipc host --gpus 0
273+ options : --shm-size "16gb" --ipc host --gpus all
274274 defaults :
275275 run :
276276 shell : bash
Original file line number Diff line number Diff line change @@ -118,7 +118,7 @@ jobs:
118118 group : aws-g4dn-2xlarge
119119 container :
120120 image : diffusers/diffusers-pytorch-cuda
121- options : --shm-size "16gb" --ipc host --gpus 0
121+ options : --shm-size "16gb" --ipc host --gpus all
122122 steps :
123123 - name : Checkout diffusers
124124 uses : actions/checkout@v3
@@ -183,7 +183,7 @@ jobs:
183183 group : aws-g4dn-2xlarge
184184 container :
185185 image : diffusers/diffusers-pytorch-cuda
186- options : --shm-size "16gb" --ipc host --gpus 0
186+ options : --shm-size "16gb" --ipc host --gpus all
187187 defaults :
188188 run :
189189 shell : bash
Original file line number Diff line number Diff line change 6464 group : aws-g4dn-2xlarge
6565 container :
6666 image : diffusers/diffusers-pytorch-cuda
67- options : --shm-size "16gb" --ipc host --gpus 0
67+ options : --shm-size "16gb" --ipc host --gpus all
6868 steps :
6969 - name : Checkout diffusers
7070 uses : actions/checkout@v3
@@ -109,7 +109,7 @@ jobs:
109109 group : aws-g4dn-2xlarge
110110 container :
111111 image : diffusers/diffusers-pytorch-cuda
112- options : --shm-size "16gb" --ipc host --gpus 0
112+ options : --shm-size "16gb" --ipc host --gpus all
113113 defaults :
114114 run :
115115 shell : bash
Original file line number Diff line number Diff line change 6262 group : aws-g4dn-2xlarge
6363 container :
6464 image : diffusers/diffusers-pytorch-cuda
65- options : --shm-size "16gb" --ipc host --gpus 0
65+ options : --shm-size "16gb" --ipc host --gpus all
6666 steps :
6767 - name : Checkout diffusers
6868 uses : actions/checkout@v3
@@ -107,7 +107,7 @@ jobs:
107107 group : aws-g4dn-2xlarge
108108 container :
109109 image : diffusers/diffusers-pytorch-cuda
110- options : --shm-size "16gb" --ipc host --gpus 0
110+ options : --shm-size "16gb" --ipc host --gpus all
111111 defaults :
112112 run :
113113 shell : bash
@@ -163,7 +163,7 @@ jobs:
163163 group : aws-g4dn-2xlarge
164164 container :
165165 image : diffusers/diffusers-pytorch-minimum-cuda
166- options : --shm-size "16gb" --ipc host --gpus 0
166+ options : --shm-size "16gb" --ipc host --gpus all
167167 defaults :
168168 run :
169169 shell : bash
You can’t perform that action at this time.
0 commit comments