@@ -474,6 +474,60 @@ def testInstancesShareHashes( self ) :
 			self.assertEqual( instance0.hash( hashType, 0 ), instance1.hash( hashType, 0 ) )
 			self.assertEqual( notInstance.hash( hashType, 0 ), instance0.hash( hashType, 0 ) )
 
+	def testInstancePrototypeHashesNotReused( self ) :
+
+		# The original intent of this test was to check that we assign consistent hashes to prototypes when
+		# opening and closing the same file ( which triggers USD to randomly shuffle the prototype names ).
+
+		# The solution we've ended up with is that, instead of assigning consistent hashes, each instance of
+		# the file gets its own unique hashes, and is basically treated separately. This allows us to just
+		# use the prototype names in the hash, since we force the hash to be unique anyway.
+
+		usedHashes = set()
+
+		for i in range( 100 ):
+			scene = IECoreScene.SceneInterface.create(
+				os.path.dirname( __file__ ) + "/data/severalInstances.usda",
+				IECore.IndexedIO.OpenMode.Read
+			)
+
+			instance0 = scene.child( "instance0" )
+			instance0Child = instance0.child( "world" )
+			instance10 = scene.child( "instance10" )
+			instance10Child = instance10.child( "model" ).child( "assembly" )
+
+			h1 = instance0Child.hash( scene.HashType.TransformHash, 1 )
+			h2 = instance10Child.hash( scene.HashType.TransformHash, 1 )
+
+			self.assertNotEqual( h1, h2 )
+
+			self.assertNotIn( h1, usedHashes )
+			for j in range( 1, 10 ):
+				instanceJ = scene.child( "instance%i" % j )
+				self.assertEqual( h1, instanceJ.child( "world" ).hash( scene.HashType.TransformHash, 1 ) )
+			del instanceJ
+
+			self.assertNotIn( h2, usedHashes )
+			for j in range( 11, 20 ):
+				instanceJ = scene.child( "instance%i" % j )
+				self.assertEqual( h2, instanceJ.child( "model" ).child( "assembly" ).hash( scene.HashType.TransformHash, 1 ) )
+			del instanceJ
+
+			usedHashes.add( h1 )
+			usedHashes.add( h2 )
+
+			# We must carefully delete everything in order to reliably trigger USD randomly switching the
+			# prototype names around ( I guess waiting for the garbage collector means we might not be
+			# fully closing the file before we open it again? Weird, but seems reproducible ).
+			# This is no longer really crucial to this test, since we just force every instance of the file
+			# to get unique hashes rather than trying to keep prototypes hashing the same, but I'm trying to
+			# document as much intent as possible here, in case we consider a different solution in the future.
+			del instance0Child
+			del instance0
+			del instance10Child
+			del instance10
+			del scene
+
 	def testGeometricInterpretation( self ) :
 
 		primitive = IECoreScene.PointsPrimitive( IECore.V3fVectorData( [ imath.V3f( 0 ) ] ) )
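
For background on the prototype-name shuffling that the new test works around: USD names prototype prims itself, and the mapping from source assets to those names is not guaranteed to be stable between opens of the same file. The sketch below is not part of the change above; it assumes a USD Python build that provides Usd.Stage.GetPrototypes() (older releases expose the same prims via GetMasters()) and simply reuses the severalInstances.usda asset from the test.

	import os

	from pxr import Usd

	# Open the same asset the test above uses ( path assumed to be relative to the test directory ).
	stage = Usd.Stage.Open( os.path.dirname( __file__ ) + "/data/severalInstances.usda" )

	# Prototype prims get session-assigned names such as "/__Prototype_1", and which source
	# asset ends up with which name is an implementation detail, so it can differ between two
	# opens of the same file. That is why the code under test gives every open of the file its
	# own unique hashes rather than relying on the prototype names being stable across opens.
	for prototype in stage.GetPrototypes() :
		print( prototype.GetPath() )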