  - AI & Machine Learning
languages:
  - python
+ published_at: 2024-11-07
+ updated_at: 2024-11-07
---

# Use Cloud GPUs for rendering your Blender projects
@@ -29,9 +31,12 @@ Here's a final render that was done using cloud GPUs:

## Prerequisites

+ This guide assumes basic knowledge of how Blender works, as well as access to (or the ability to create) Blender scenes for testing.
+
- [UV](https://docs.astral.sh/uv/) - for dependency management
- The [Nitric CLI](/get-started/installation)
- An [AWS](https://aws.amazon.com) or [Google Cloud](https://cloud.google.com) account (_your choice_)
+ - [Blender](https://www.blender.org/download/) - for creating your Blender scenes

## Getting started

@@ -171,29 +176,29 @@ async def get_render(ctx: HttpContext):

    return ctx

- @main_api.put("/:blend")
- async def write_render(ctx: HttpContext):
-     blend_scene_key = ctx.req.params["blend"]
-
-     # Write the blend scene rendering settings
-     raw_metadata = {
-         "file_format": str(ctx.req.query.get('file_format', ['PNG'])[0]),
-         "fps": int(ctx.req.query.get('fps', [0])[0]),
-         "device": str(ctx.req.query.get('device', ['GPU'])[0]),
-         "engine": str(ctx.req.query.get('engine', ['CYCLES'])[0]),
-         "animate": bool(ctx.req.query.get('animate', [False])[0]),
-     }
-     metadata = bytes(json.dumps(raw_metadata), encoding="utf-8")
+ @main_api.put("/:blend")
+ async def write_render(ctx: HttpContext):
+     blend_scene_key = ctx.req.params["blend"]

-     await readable_writeable_blend_bucket.file(f"metadata-{blend_scene_key}.json").write(metadata)
+     # Write the blend scene rendering settings
+     raw_metadata = {
+         "file_format": str(ctx.req.query.get('file_format', ['PNG'])[0]),
+         "fps": int(ctx.req.query.get('fps', [0])[0]),
+         "device": str(ctx.req.query.get('device', ['GPU'])[0]),
+         "engine": str(ctx.req.query.get('engine', ['CYCLES'])[0]),
+         "animate": bool(ctx.req.query.get('animate', [False])[0]),
+     }
+     metadata = bytes(json.dumps(raw_metadata), encoding="utf-8")

-     # Write the blend scene to the bucket using an upload URL
-     blend_upload_url = await readable_writeable_blend_bucket.file(f"blend-{blend_scene_key}.blend").upload_url()
+     await readable_writeable_blend_bucket.file(f"metadata-{blend_scene_key}.json").write(metadata)

-     ctx.res.headers["Location"] = blend_upload_url
-     ctx.res.status = 307
+     # Write the blend scene to the bucket using an upload URL
+     blend_upload_url = await readable_writeable_blend_bucket.file(f"blend-{blend_scene_key}.blend").upload_url()

-     return ctx
+     ctx.res.headers["Location"] = blend_upload_url
+     ctx.res.status = 307
+
+     return ctx

Nitric.run()
```
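
To see how a client would drive these two routes end to end, here's a minimal sketch (not part of the guide's own code) that uploads a scene along with its render settings. The base URL, scene key, and local file name are assumptions - substitute the address printed by `nitric start` and your own `.blend` file.

```python
import requests

API_BASE = "http://localhost:4001"  # assumption: your API's local or deployed address
SCENE_KEY = "scene1"                # hypothetical scene key

# PUT /:blend answers with a 307 redirect whose Location header is a presigned upload URL.
# allow_redirects=False lets us capture that header instead of following it automatically.
resp = requests.put(
    f"{API_BASE}/{SCENE_KEY}",
    params={"file_format": "PNG", "device": "GPU", "engine": "CYCLES"},
    allow_redirects=False,
)
upload_url = resp.headers["Location"]

# Upload the .blend scene straight to the bucket via the presigned URL.
with open("scene.blend", "rb") as f:
    requests.put(upload_url, data=f)
```

Rendered outputs can later be fetched with `GET /render/:file`, which answers with a 303 redirect to a download URL. This redirect-based flow keeps large files out of the API service itself: the service only hands out presigned URLs while the bucket does the heavy lifting.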
@@ -204,50 +209,50 @@ We will add a storage listener which will be triggered by files being added to t
# !collapse(1:46) collapsed
import json

- from nitric.application import Nitric
- from nitric.context import HttpContext
- from nitric.resources import BucketNotificationContext
+ from nitric.application import Nitric
+ from nitric.context import HttpContext
+ from nitric.resources import BucketNotificationContext

- from common.resources import rendered_bucket, main_api, blend_bucket, renderer_job
+ from common.resources import rendered_bucket, main_api, blend_bucket, renderer_job

- readable_rendered_bucket = rendered_bucket.allow("read")
- readable_writeable_blend_bucket = blend_bucket.allow("write", "read")
- submittable_renderer_job = renderer_job.allow("submit")
+ readable_rendered_bucket = rendered_bucket.allow("read")
+ readable_writeable_blend_bucket = blend_bucket.allow("write", "read")
+ submittable_renderer_job = renderer_job.allow("submit")

- @main_api.get("/render/:file")
- async def get_render(ctx: HttpContext):
-     file_name = ctx.req.params['file']
+ @main_api.get("/render/:file")
+ async def get_render(ctx: HttpContext):
+     file_name = ctx.req.params['file']

-     download_url = await readable_writeable_blend_bucket.file(file_name).download_url(3600)
+     download_url = await readable_writeable_blend_bucket.file(file_name).download_url(3600)

-     ctx.res.headers["Location"] = download_url
-     ctx.res.status = 303
+     ctx.res.headers["Location"] = download_url
+     ctx.res.status = 303

-     return ctx
+     return ctx

- @main_api.put("/:blend")
- async def write_render(ctx: HttpContext):
-     blend_scene_key = ctx.req.params["blend"]
-
-     # Write the blend scene rendering settings
-     raw_metadata = {
-         "file_format": str(ctx.req.query.get('file_format', ['PNG'])[0]),
-         "fps": int(ctx.req.query.get('fps', [0])[0]),
-         "device": str(ctx.req.query.get('device', ['GPU'])[0]),
-         "engine": str(ctx.req.query.get('engine', ['CYCLES'])[0]),
-         "animate": bool(ctx.req.query.get('animate', [False])[0]),
-     }
-     metadata = bytes(json.dumps(raw_metadata), encoding="utf-8")
+ @main_api.put("/:blend")
+ async def write_render(ctx: HttpContext):
+     blend_scene_key = ctx.req.params["blend"]

-     await readable_writeable_blend_bucket.file(f"metadata-{blend_scene_key}.json").write(metadata)
+     # Write the blend scene rendering settings
+     raw_metadata = {
+         "file_format": str(ctx.req.query.get('file_format', ['PNG'])[0]),
+         "fps": int(ctx.req.query.get('fps', [0])[0]),
+         "device": str(ctx.req.query.get('device', ['GPU'])[0]),
+         "engine": str(ctx.req.query.get('engine', ['CYCLES'])[0]),
+         "animate": bool(ctx.req.query.get('animate', [False])[0]),
+     }
+     metadata = bytes(json.dumps(raw_metadata), encoding="utf-8")

-     # Write the blend scene to the bucket using an upload URL
-     blend_upload_url = await readable_writeable_blend_bucket.file(f"blend-{blend_scene_key}.blend").upload_url()
+     await readable_writeable_blend_bucket.file(f"metadata-{blend_scene_key}.json").write(metadata)

-     ctx.res.headers["Location"] = blend_upload_url
-     ctx.res.status = 307
+     # Write the blend scene to the bucket using an upload URL
+     blend_upload_url = await readable_writeable_blend_bucket.file(f"blend-{blend_scene_key}.blend").upload_url()

-     return ctx
+     ctx.res.headers["Location"] = blend_upload_url
+     ctx.res.status = 307
+
+     return ctx

@blend_bucket.on("write", "blend-")
async def on_written_image(ctx: BucketNotificationContext):
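
The hunk ends just as the storage listener is declared. As a rough idea of where this is heading, one plausible body for the handler - a sketch only, not necessarily the guide's exact implementation - reads back the stored render settings and submits the batch job. It reuses the resources declared above; `ctx.req.key`, `read()`, and `submit()` are assumptions about the Nitric Python SDK rather than confirmed API details.

```python
@blend_bucket.on("write", "blend-")
async def on_written_image(ctx: BucketNotificationContext):
    # Assumption: the notification context exposes the written object's key.
    blend_key = ctx.req.key  # e.g. "blend-scene1.blend"
    scene = blend_key.removeprefix("blend-").removesuffix(".blend")

    # Load the render settings that write_render stored alongside the scene.
    metadata = await readable_writeable_blend_bucket.file(f"metadata-{scene}.json").read()
    settings = json.loads(metadata)

    # Assumption: an allowed job exposes an async submit() that takes a dict payload.
    await submittable_renderer_job.submit({"blend": blend_key, **settings})
```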
@@ -813,21 +818,21 @@ We'll also need to add `batch-services` as a preview feature.
```yaml title:nitric.yaml
# !collapse(1:17) collapsed
name: blender-render
- services:
-   - match: services/*.py
-     start: uv run watchmedo auto-restart -p *.py --no-restart-on-command-exit -R python -- -u $SERVICE_PATH
-     runtime: python
-
- batch-services:
-   - match: batches/*.py
-     start: uv run watchmedo auto-restart -p *.py --no-restart-on-command-exit -R python -- -u $SERVICE_PATH
-     runtime: blender
-
- runtimes:
-   blender:
-     dockerfile: blender.dockerfile
-   python:
-     dockerfile: python.dockerfile
+ services:
+   - match: services/*.py
+     start: uv run watchmedo auto-restart -p *.py --no-restart-on-command-exit -R python -- -u $SERVICE_PATH
+     runtime: python
+
+ batch-services:
+   - match: batches/*.py
+     start: uv run watchmedo auto-restart -p *.py --no-restart-on-command-exit -R python -- -u $SERVICE_PATH
+     runtime: blender
+
+ runtimes:
+   blender:
+     dockerfile: blender.dockerfile
+   python:
+     dockerfile: python.dockerfile

preview:
  - batch-services