"""
Example demonstrating camera sensors with different rendering backends.

Cameras are created as sensors via add_sensor() with three backends:
Rasterizer, Raytracer, and BatchRenderer. The example also exercises camera
attachment, per-camera lights, and batched rendering.
"""

import os
import matplotlib.pyplot as plt
import genesis as gs
from genesis.utils.misc import tensor_to_array
from genesis.options.sensors import RasterizerCameraOptions, RaytracerCameraOptions, BatchRendererCameraOptions

########################## init ##########################
gs.init(seed=0, precision="32", backend=gs.gpu, logging_level="info")

########################## check dependencies ##########################
# Try to import LuisaRenderPy to determine whether the raytracer is available
try:
    import LuisaRenderPy

    ENABLE_RAYTRACER = True
    print("✓ LuisaRenderPy available - Raytracer will be enabled")
except ImportError:
    ENABLE_RAYTRACER = False
    print("⊘ LuisaRenderPy not available - Raytracer will be disabled")

try:
    import gs_madrona

    ENABLE_MADRONA = True
    print("✓ gs_madrona available - BatchRenderer will be enabled")
except ImportError:
    ENABLE_MADRONA = False
    print("⊘ gs_madrona not available - BatchRenderer will be disabled")
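# Note: the Madrona-based BatchRenderer only runs on the CUDA backend, so it is
# additionally gated on the active Genesis backend below.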
ENABLE_MADRONA = ENABLE_MADRONA and (gs.backend == gs.cuda)

########################## create a scene ##########################
# Choose renderer based on raytracer availability
if ENABLE_RAYTRACER:
    renderer = gs.renderers.RayTracer(
        env_surface=gs.surfaces.Emission(
            emissive_texture=gs.textures.ColorTexture(color=(0.2, 0.3, 0.5)),
        ),
        env_radius=20.0,
    )
else:
    # Use Rasterizer as fallback renderer
    renderer = gs.renderers.Rasterizer()

scene = gs.Scene(
    rigid_options=gs.options.RigidOptions(
        enable_collision=True,
        gravity=(0, 0, -9.8),
    ),
    renderer=renderer,
    show_viewer=False,
)

########################## entities ##########################
plane = scene.add_entity(
    morph=gs.morphs.Plane(),
    surface=gs.surfaces.Rough(color=(0.4, 0.4, 0.4)),
)

sphere = scene.add_entity(
    morph=gs.morphs.Sphere(pos=(0.0, 0.0, 2.0), radius=0.5),
    surface=gs.surfaces.Smooth(color=(1.0, 0.5, 0.5)),
)

box = scene.add_entity(
    morph=gs.morphs.Box(pos=(1.0, 1.0, 1.0), size=(0.3, 0.3, 0.3)),
    surface=gs.surfaces.Rough(color=(0.5, 1.0, 0.5)),
)

########################## Camera Configurations ##########################
# Define common camera parameters
CAMERA_COMMON_KWARGS = {
    "up": (0.0, 0.0, 1.0),
    "near": 0.1,
    "far": 100.0,
}

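# Three camera configurations are exercised per backend: a free-standing camera with
# its own point light, a second camera with no lights, and a camera attached to the
# sphere entity via a link-local pose offset.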
CAMERA_SENSORS_KWARGS = [
    {
        "name": "cam0",
        "pos": (3.0, 0.0, 2.0),
        "lookat": (0.0, 0.0, 1.0),
        "fov": 60.0,
        "attachment": None,  # No attachment
        "lights": [{"pos": (2.0, 2.0, 5.0), "color": (1.0, 1.0, 1.0), "intensity": 1.0}],
    },
    {
        "name": "cam1",
        "pos": (0.0, 3.0, 2.0),
        "lookat": (0.0, 0.0, 1.0),
        "fov": 60.0,
        "attachment": None,
        "lights": [],
    },
    {
        "name": "cam_attached",
        "pos": (0.0, 0.0, 1.0),
        "lookat": (0.0, 0.0, 0.0),
        "fov": 70.0,
        "attachment": {
            "entity_idx": None,  # filled in with the sphere's index below
            "link_idx_local": 0,
            "pos_offset": (0.0, 0.0, 0.0),
            "euler_offset": (0.0, 0.0, 0.0),
        },
        "lights": [],
    },
]


# Create camera configurations for all backends
backends = [
    ("raster", RasterizerCameraOptions, True),  # Always enabled
    ("raytrace", RaytracerCameraOptions, ENABLE_RAYTRACER),
    ("batch", BatchRendererCameraOptions, ENABLE_MADRONA),
]

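# Build one options object per (backend, camera config) pair: shared pose/FOV values
# come from the configs above, while backend-specific extras (near/far for the
# rasterizer, spp/denoise for the raytracer, use_rasterizer for the batch renderer)
# are added in the loop below.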
backend_configs = {}
for backend_name, options_class, enabled in backends:
    if not enabled:
        continue

    configs = []
    for camera_config in CAMERA_SENSORS_KWARGS:
        name = f"{backend_name}_{camera_config['name']}"
        res = (500, 600)

        # Create options with common and backend-specific parameters
        options_kwargs = {
            "res": res,
            "pos": camera_config["pos"],
            "lookat": camera_config["lookat"],
            "up": CAMERA_COMMON_KWARGS["up"],
            "fov": camera_config["fov"],
            "lights": camera_config["lights"],
        }

        # Handle attachment
        attachment = camera_config["attachment"]
        if attachment is not None:
            # For attached cameras, set the entity_idx to the sphere's index
            options_kwargs.update(
                {
                    "entity_idx": sphere.idx,
                    "link_idx_local": attachment["link_idx_local"],
                    "pos_offset": attachment["pos_offset"],
                    "euler_offset": attachment["euler_offset"],
                }
            )

        # Add backend-specific parameters
        if backend_name == "raster":
            options_kwargs.update({"near": CAMERA_COMMON_KWARGS["near"], "far": CAMERA_COMMON_KWARGS["far"]})
        elif backend_name == "raytrace":
            options_kwargs.update(
                {
                    "model": "pinhole",
                    "spp": 64,
                    "denoise": False,
                }
            )
            if attachment is None:  # Only add env surface for non-attached cameras
                options_kwargs.update(
                    {
                        "env_surface": gs.surfaces.Emission(
                            emissive_texture=gs.textures.ColorTexture(color=(0.2, 0.3, 0.5)),
                        ),
                        "env_radius": 20.0,
                    }
                )
        elif backend_name == "batch":
            options_kwargs.update({"use_rasterizer": True})
            if camera_config["lights"]:
                adjusted_lights = [{**light, "directional": False} for light in camera_config["lights"]]
                options_kwargs["lights"] = adjusted_lights

        # Adjust lights for raytracer (different intensity/color)
        if backend_name == "raytrace" and camera_config["lights"]:
            adjusted_lights = [
                {**light, "color": (10.0, 10.0, 10.0), "intensity": 1.0} for light in camera_config["lights"]
            ]
            options_kwargs["lights"] = adjusted_lights

        options = options_class(**options_kwargs)
        configs.append(
            {
                "name": name,
                "options": options,
                "attachment": camera_config["attachment"],
            }
        )

    backend_configs[backend_name] = configs


########################## Create Cameras ##########################
cameras = {}

for group_name, configs in backend_configs.items():
    print(f"\n=== {group_name} Cameras ===")
    for config in configs:
        camera = scene.add_sensor(config["options"])
        cameras[config["name"]] = camera

    print(f"✓ Created {len(configs)} {group_name.lower()} cameras")


########################## build ##########################
n_envs = 1
scene.build(n_envs=n_envs)

########################## identify attached cameras ##########################
print("\n=== Identifying Attached Cameras ===")

# Identify cameras that are configured to be attached
attached_cameras = []
for group_name, configs in backend_configs.items():
    for config in configs:
        if config["attachment"] is not None:
            camera = cameras[config["name"]]
            attached_cameras.append(camera)
            print(f"✓ {config['name']} is attached to sphere")

print(f"✓ Identified {len(attached_cameras)} attached cameras")

########################## simulate and render ##########################
os.makedirs("camera_sensor_output", exist_ok=True)

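# Each camera sensor is polled with read(); the returned record's rgb field carries a
# leading environment dimension when n_envs > 1, hence the indexing below.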
for i in range(100):
    scene.step()
    # Render every 10 steps
    if i % 10 == 0:
        print(f"\n--- Step {i} ---")

        camera_data = {}
        for cam_name, camera in cameras.items():
            data = camera.read()
            camera_data[cam_name] = data
            print(f" {cam_name.replace('_', ' ').title()} RGB shape: {data.rgb.shape}")

        for cam_name, data in camera_data.items():
            rgb_data = data.rgb[0] if data.rgb.ndim > 3 else data.rgb
            suffix = "_env0" if n_envs > 1 else ""
            filename = f"camera_sensor_output/{cam_name}{suffix}_step{i:03d}.png"
            plt.imsave(filename, tensor_to_array(rgb_data))