SceneAlgo : Added hierarchyHash
danieldresser-ie committed Feb 11, 2025
1 parent 5df3b7a commit 0de2dd6
Showing 5 changed files with 211 additions and 0 deletions.
1 change: 1 addition & 0 deletions Changes.md
@@ -6,6 +6,7 @@ API

- SceneAlgo :
- Added `parallelReduceLocations()` for implementing functions that need to combine results while traversing a ScenePlug.
- Added `hierarchyHash()` for hashing a scene location and all of its descendants.


1.5.5.0 (relative to 1.5.4.1)
6 changes: 6 additions & 0 deletions include/GafferScene/SceneAlgo.h
@@ -315,6 +315,12 @@ GAFFERSCENE_API IECore::PathMatcher linkedLights( const ScenePlug *scene, const
/// Returns the paths to all lights which are linked to at least one of the specified objects.
GAFFERSCENE_API IECore::PathMatcher linkedLights( const ScenePlug *scene, const IECore::PathMatcher &objects );

/// Complex hashing
/// ===============

/// Hashes all properties of a location and all of its descendants. Does not include set membership.
GAFFERSCENE_API IECore::MurmurHash hierarchyHash( const ScenePlug *scene, const ScenePlug::ScenePath &root );

/// Miscellaneous
/// =============

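A rough usage sketch of the new function from Python, mirroring the binding and test code later in this commit (the specific node setup is illustrative only) :

import GafferScene

# A trivial scene : a cube parented under a group.
cube = GafferScene.Cube()
group = GafferScene.Group()
group["in"][0].setInput( cube["out"] )

# Hash everything at and below "/group". Any change to the transform, attributes,
# object, bound or child names within that hierarchy changes the result.
before = GafferScene.SceneAlgo.hierarchyHash( group["out"], "/group" )

cube["transform"]["translate"]["x"].setValue( 1 )
after = GafferScene.SceneAlgo.hierarchyHash( group["out"], "/group" )

assert before != after
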
140 changes: 140 additions & 0 deletions python/GafferSceneTest/SceneAlgoTest.py
@@ -2197,6 +2197,146 @@ def testMatchingPathsPerformance( self ) :
result = IECore.PathMatcher()
GafferScene.SceneAlgo.matchingPaths( pathMatcher, scene, result )

def testHierarchyHash( self ) :

# We need to check that changing basically anything about a scene will result in a unique hash

baseGroup = GafferScene.Group()

# Temporarily disable testing of bounds ( we're trying to test features independently here, and
# a lot of the other things we're testing have cross-talk with the bounds )
testScene = GafferScene.ScenePlug()
testScene.setInput( baseGroup["out"] )
testScene["bound"].setInput( None )

hashes = set()
def assertHashUnique():
h = GafferScene.SceneAlgo.hierarchyHash( testScene, "/group" )
self.assertNotIn( h, hashes )
hashes.add( h )

def assertHashNotUnique():
h = GafferScene.SceneAlgo.hierarchyHash( testScene, "/group" )
self.assertIn( h, hashes )

assertHashUnique()

cube = GafferScene.Cube()
baseGroup["in"][0].setInput( cube["out"] )

assertHashUnique()

sphere = GafferScene.Sphere()
baseGroup["in"][1].setInput( sphere["out"] )

self.assertEqual( baseGroup["out"].childNames( "/group" ), IECore.InternedStringVectorData( [ "cube", "sphere" ] ) )
assertHashUnique()

# Documenting current edge case behaviour: changing the order of children, but nothing else.
# We've decided we do want this to affect the hash.
baseGroup["in"][1].setInput( cube["out"] )
baseGroup["in"][0].setInput( sphere["out"] )
self.assertEqual( baseGroup["out"].childNames( "/group" ), IECore.InternedStringVectorData( [ "sphere", "cube" ] ) )

assertHashUnique()

sphere["divisions"].setValue( imath.V2i( 3, 6 ) )

assertHashUnique()

sphere["transform"]["translate"]["x"].setValue( 2 )

assertHashUnique()

cube["transform"]["rotate"]["y"].setValue( 30 )

assertHashUnique()

sphereFilter = GafferScene.PathFilter()
sphereFilter["paths"].setValue( IECore.StringVectorData( [ '/sphere' ] ) )
customAttributes = GafferScene.CustomAttributes()
customAttributes["in"].setInput( sphere["out"] )
customAttributes["filter"].setInput( sphereFilter["out"] )
baseGroup["in"][0].setInput( customAttributes["out"] )

# No attributes added yet
assertHashNotUnique()

customAttributes["attributes"].addChild( Gaffer.NameValuePlug( "foo", Gaffer.StringPlug( "value", defaultValue = 'foo' ), True, "member1" ) )

# But now it should change
assertHashUnique()

# We can add additional levels of hierarchy inside the scene we're traversing
subGroup = GafferScene.Group()
subGroup["in"][0].setInput( customAttributes["out"] )

baseGroup["in"][0].setInput( subGroup["out"] )

assertHashUnique()

# And changes within the deeper hierarchy still affect the hash

customAttributes["attributes"][0]["value"].setValue( "blah" )

assertHashUnique()

sphere["transform"]["translate"]["x"].setValue( 3 )

assertHashUnique()

# We've now tested basically everything except the bound. It's hard to test the bound specifically :
# with existing Gaffer nodes, basically anything we do to affect the bound would also affect one of
# the other properties we've already tested, so we would see the hash change, but it would be hard
# to be certain why. So instead, here's an extremely synthetic test just to confirm that we do
# check the bound.

loosePlug = GafferScene.ScenePlug()
hashA = GafferScene.SceneAlgo.hierarchyHash( loosePlug, "/" )
loosePlug["bound"].setValue( imath.Box3f( imath.V3f( 0 ), imath.V3f( 1 ) ) )
self.assertNotEqual( GafferScene.SceneAlgo.hierarchyHash( loosePlug, "/" ), hashA )

# Hashing a hierarchy that is parented somewhere should have exactly the same effect

group = GafferScene.Group()
group["in"][0].setInput( baseGroup["out"] )

self.assertEqual(
GafferScene.SceneAlgo.hierarchyHash( baseGroup["out"], "/group" ),
GafferScene.SceneAlgo.hierarchyHash( group["out"], "/group/group" )
)

@GafferTest.TestRunner.PerformanceTestMethod( repeat = 1 )
def testHierarchyHashPerf( self ):

sphere = GafferScene.Sphere()

pathFilter = GafferScene.PathFilter()
pathFilter["paths"].setValue( IECore.StringVectorData( [ '/sphere' ] ) )

duplicate = GafferScene.Duplicate()
duplicate["in"].setInput( sphere["out"] )
duplicate["filter"].setInput( pathFilter["out"] )
duplicate["copies"].setValue( 300000 )

# Get everything warm - we want to measure the overhead of hierarchyHash, not the cost of
# actually computing everything it requires
GafferScene.SceneAlgo.hierarchyHash( duplicate["out"], "/" )

# Even with this attempt to cache things, it's still not a very interesting test. What we
# really want to know is whether we're imposing any extra overhead in how we combine hashes,
# or with the machinery of parallelReduceLocations itself. But repeatedly hashing plugs is
# so much more expensive than anything else we're doing that we're really just
# seeing the time to do the hashes. This seems to be true even if I make the hash cache massive:
# even pulling hashes from the cache has a cost.
#
# It's good that we're not imposing measurable overhead here, but it means this
# doesn't tell us much about how well parallelReduceLocations can theoretically work.

with GafferTest.TestRunner.PerformanceScope() :
GafferScene.SceneAlgo.hierarchyHash( duplicate["out"], "/" )

def testRenderAdaptors( self ) :

sphere = GafferScene.Sphere()
54 changes: 54 additions & 0 deletions src/GafferScene/SceneAlgo.cpp
@@ -1238,6 +1238,60 @@ IECore::PathMatcher GafferScene::SceneAlgo::linkedLights( const ScenePlug *scene
return result.intersection( scene->set( g_lights )->readable() );
}

//////////////////////////////////////////////////////////////////////////
// Complex hashing
//////////////////////////////////////////////////////////////////////////

IECore::MurmurHash GafferScene::SceneAlgo::hierarchyHash( const ScenePlug *scene, const ScenePlug::ScenePath &root )
{
return GafferScene::SceneAlgo::parallelReduceLocations(
scene,
IECore::MurmurHash(),
[&] ( const ScenePlug *scene, const ScenePlug::ScenePath &path )
{
IECore::MurmurHash h;
if( path.size() > root.size() )
{
h.append( path.back() );
}

// Hashing this in may feel a bit redundant when we're also hashing the paths of our children. But
// our children are visited out of order, so including this is how we can catch differences between
// hierarchies that differ solely in the order of their children.
scene->childNamesPlug()->hash( h );

scene->boundPlug()->hash( h );
scene->transformPlug()->hash( h );
scene->attributesPlug()->hash( h );
scene->objectPlug()->hash( h );

return h;
},
[]( IECore::MurmurHash &result, const IECore::MurmurHash &childrenResult )
{
// By doing an actual append with the child results, we ensure that their hash is properly
// mixed with the path leaf name hashed in above, so we won't get incorrect matches if the same
// children were assigned to different locations.
result.append( childrenResult );
},
[]( IECore::MurmurHash &result, const IECore::MurmurHash &sibling )
{
// We want our resulting hash to be deterministic, even though the order in which locations are
// visited is not. We achieve this by doing a simple commutative add here instead of hashing.
// Because the inputs are proper hashes with their bits evenly distributed, and they include their
// paths in the hash, we should not get a matching sum unless the inputs match ( or we experience an
// extremely unlikely collision ). See the comment in ThreadablePathHashAccumulator for more
// discussion of why we can get away with this.
result = IECore::MurmurHash(
result.h1() + sibling.h1(),
result.h2() + sibling.h2()
);
},
root
);
}

//////////////////////////////////////////////////////////////////////////
// Miscellaneous
//////////////////////////////////////////////////////////////////////////
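The sibling-combination step above can be illustrated in isolation. A toy sketch of the idea (using hashlib's blake2b as a stand-in for MurmurHash, purely for illustration rather than Gaffer's actual implementation) : summing each 64-bit half of well-mixed hashes is commutative, so the combined result is independent of visit order while still depending on the set of inputs.

import hashlib

def h128( data ) :
	# Stand-in for a well-mixed 128-bit hash such as MurmurHash.
	d = hashlib.blake2b( data.encode(), digest_size = 16 ).digest()
	return ( int.from_bytes( d[:8], "little" ), int.from_bytes( d[8:], "little" ) )

def combineSiblings( hashes ) :
	# Commutative combination : sum each 64-bit half modulo 2**64, so the result
	# does not depend on the order in which siblings were visited.
	return (
		sum( h[0] for h in hashes ) % 2**64,
		sum( h[1] for h in hashes ) % 2**64,
	)

a = h128( "/group/cube" )
b = h128( "/group/sphere" )
c = h128( "/group/group" )

# The same siblings in a different order combine to the same result ...
assert combineSiblings( [ a, b, c ] ) == combineSiblings( [ c, a, b ] )
# ... but a different set of siblings combines to a different result.
assert combineSiblings( [ a, b ] ) != combineSiblings( [ a, b, c ] )

Because each input already mixes in its own location name, matching sums are only expected when the sibling sets genuinely match, barring an extremely unlikely collision.
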
10 changes: 10 additions & 0 deletions src/GafferSceneModule/SceneAlgoBinding.cpp
@@ -363,6 +363,12 @@ IECore::PathMatcher linkedLightsWrapper2( const GafferScene::ScenePlug &scene, c
return SceneAlgo::linkedLights( &scene, objects );
}

IECore::MurmurHash hierarchyHashWrapper( const ScenePlug &scene, const ScenePlug::ScenePath &location )
{
IECorePython::ScopedGILRelease r;
return SceneAlgo::hierarchyHash( &scene, location );
}

struct RenderAdaptorWrapper
{

@@ -493,6 +499,10 @@ void bindSceneAlgo()
def( "linkedLights", &linkedLightsWrapper1 );
def( "linkedLights", &linkedLightsWrapper2 );

// Complex hashing

def( "hierarchyHash", &hierarchyHashWrapper );

// Render adaptors

def( "registerRenderAdaptor", &registerRenderAdaptorWrapper, ( arg( "name" ), arg( "adaptor" ), arg( "client" ) = "*", arg( "renderer" ) = "*" ) );
