@@ -54,7 +54,7 @@ vec2 SampleSphericalMap(in vec3 v)
5454 return uv;
5555}
5656
57- vec3 nbl_computeLighting(inout nbl_glsl_IsotropicViewSurfaceInteraction out_interaction, in mat2 dUV, in MC_precomputed_t precomp)
57+ vec3 nbl_computeLighting(inout nbl_glsl_IsotropicViewSurfaceInteraction out_interaction, in mat2 dUV, in nbl_glsl_MC_precomputed_t precomp)
5858{
5959 nbl_glsl_xoroshiro64star_state_t scramble_start_state = textureLod(scramblebuf,gl_FragCoord.xy/VIEWPORT_SZ,0).rg;
6060
@@ -63,16 +63,16 @@ vec3 nbl_computeLighting(inout nbl_glsl_IsotropicViewSurfaceInteraction out_inte
6363 vec3 color = vec3(0.0);
6464
6565#ifdef USE_ENVMAP
66- instr_stream_t gcs = getGenChoiceStream(precomp);
67- instr_stream_t rnps = getRemAndPdfStream(precomp);
66+ nbl_glsl_instr_stream_t gcs = getGenChoiceStream(precomp);
67+ nbl_glsl_instr_stream_t rnps = getRemAndPdfStream(precomp);
6868 for (int i = 0; i < SAMPLE_COUNT; ++i)
6969 {
7070 nbl_glsl_xoroshiro64star_state_t scramble_state = scramble_start_state;
7171
7272 vec3 rand = rand3d(i,scramble_state);
7373 float pdf;
7474 nbl_glsl_LightSample s;
75- vec3 rem = runGenerateAndRemainderStream (precomp, gcs, rnps, rand, pdf, s);
75+ vec3 rem = nbl_glsl_runGenerateAndRemainderStream (precomp, gcs, rnps, rand, pdf, s);
7676
7777 vec2 uv = SampleSphericalMap(s.L);
7878 color += rem*textureLod(envMap, uv, 0.0).xyz;
@@ -260,6 +260,23 @@ int main()
260260 params.WindowSize .Width = film.width ;
261261 params.WindowSize .Height = film.height ;
262262 }
263+ else return 1 ; // no cameras
264+
265+ const auto & sensor = globalMeta->sensors .front (); // always choose first one
266+ auto isOkSensorType = [](const ext::MitsubaLoader::CElementSensor& sensor) -> bool {
267+ return sensor.type == ext::MitsubaLoader::CElementSensor::Type::PERSPECTIVE || sensor.type == ext::MitsubaLoader::CElementSensor::Type::THINLENS;
268+ };
269+
270+ if (!isOkSensorType (sensor))
271+ return 1 ;
272+
273+ bool leftHandedCamera = false ;
274+ {
275+ auto relativeTransform = sensor.transform .matrix .extractSub3x4 ();
276+ if (relativeTransform.getPseudoDeterminant ().x < 0 .f )
277+ leftHandedCamera = true ;
278+ }
279+
263280 params.DriverType = video::EDT_OPENGL;
264281 auto device = createDeviceEx (params);
265282
@@ -313,6 +330,7 @@ int main()
313330 // gather all meshes into core::vector and modify their pipelines
314331 core::vector<core::smart_refctd_ptr<asset::ICPUMesh>> cpumeshes;
315332 cpumeshes.reserve (meshes.getSize ());
333+ uint32_t cc = cpumeshes.capacity ();
316334 for (auto it = meshes.getContents ().begin (); it != meshes.getContents ().end (); ++it)
317335 {
318336 cpumeshes.push_back (core::smart_refctd_ptr_static_cast<asset::ICPUMesh>(std::move (*it)));
@@ -394,7 +412,7 @@ int main()
394412 }
395413 }
396414
397- constexpr uint32_t ENVMAP_SAMPLE_COUNT = 16u ;
415+ constexpr uint32_t ENVMAP_SAMPLE_COUNT = 64u ;
398416 constexpr float LIGHT_INTENSITY_SCALE = 0 .01f ;
399417
400418 core::unordered_set<const asset::ICPURenderpassIndependentPipeline*> modifiedPipelines;
@@ -418,6 +436,8 @@ int main()
418436 modifiedShaders.insert ({ core::smart_refctd_ptr<asset::ICPUSpecializedShader>(fs),newfs });
419437 pipeline->setShaderAtStage (asset::ICPUSpecializedShader::ESS_FRAGMENT, newfs.get ());
420438 }
439+ // invert what is recognized as frontface in case of RH camera
440+ pipeline->getRasterizationParams ().frontFaceIsCCW = !leftHandedCamera;
421441 modifiedPipelines.insert (pipeline);
422442 }
423443 }
@@ -442,7 +462,6 @@ int main()
442462 }
443463 }
444464
445- // auto gpuVT = core::make_smart_refctd_ptr<video::IGPUVirtualTexture>(driver, globalMeta->VT.get());
446465 auto gpuds0 = driver->getGPUObjectsFromAssets (&cpuds0.get (), &cpuds0.get ()+1 )->front ();
447466
448467 auto gpuds1layout = driver->getGPUObjectsFromAssets (&ds1layout, &ds1layout+1 )->front ();
@@ -572,28 +591,21 @@ int main()
572591 scene::ICameraSceneNode* camera = nullptr ;
573592 core::recti viewport (core::position2di (0 ,0 ), core::position2di (params.WindowSize .Width ,params.WindowSize .Height ));
574593
575- auto isOkSensorType = [](const ext::MitsubaLoader::CElementSensor& sensor) -> bool {
576- return sensor.type ==ext::MitsubaLoader::CElementSensor::Type::PERSPECTIVE || sensor.type ==ext::MitsubaLoader::CElementSensor::Type::THINLENS;
577- };
578594// #define TESTING
579595#ifdef TESTING
580596 if (0 )
581597#else
582598 if (globalMeta->sensors .size () && isOkSensorType (globalMeta->sensors .front ()))
583599#endif
584600 {
585- const auto & sensor = globalMeta->sensors .front ();
586601 const auto & film = sensor.film ;
587602 viewport = core::recti (core::position2di (film.cropOffsetX ,film.cropOffsetY ), core::position2di (film.cropWidth ,film.cropHeight ));
588603
589604 auto extent = sceneBound.getExtent ();
590605 camera = smgr->addCameraSceneNodeFPS (nullptr ,100 .f ,core::min (extent.X ,extent.Y ,extent.Z )*0 .0001f );
591606 // need to extract individual components
592- bool leftHandedCamera = false ;
593607 {
594608 auto relativeTransform = sensor.transform .matrix .extractSub3x4 ();
595- if (relativeTransform.getPseudoDeterminant ().x < 0 .f )
596- leftHandedCamera = true ;
597609
598610 auto pos = relativeTransform.getTranslation ();
599611 camera->setPosition (pos.getAsVector3df ());
@@ -697,7 +709,7 @@ int main()
697709 uboData.NormalMat [11 ] = camera->getPosition ().Z ;
698710 driver->updateBufferRangeViaStagingBuffer (gpuubo.get (), 0u , sizeof (uboData), &uboData);
699711
700- for (uint32_t j = 1u ; j < gpumeshes->size (); ++j)
712+ for (uint32_t j = 0u ; j < gpumeshes->size (); ++j)
701713 {
702714 auto & mesh = (*gpumeshes)[j];
703715
0 commit comments