@@ -1651,47 +1651,35 @@ SyncStageAccessFlags SyncStageAccess::AccessScope(VkPipelineStageFlags2KHR stage
     return AccessScopeByStage(stages) & AccessScopeByAccess(accesses);
 }
 
+// The semantics of the InfillUpdateOps of infill_update_range are slightly different from those of the UpdateMemoryAccessState
+// Action operations, as this simplifies the generic traversal. So we wrap the Action in a semantics adapter to get the same effect.
 template <typename Action>
-void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
-    // TODO: Optimization for operations that do a pure overwrite (i.e. WRITE usages which rewrite the state, vs READ usages
-    // that do incremental updates
-    assert(accesses);
-    if (range.empty()) return;
-    auto pos = accesses->lower_bound(range);
-    if (pos == accesses->end() || !pos->first.intersects(range)) {
-        // The range is empty, fill it with a default value.
-        pos = action.Infill(accesses, pos, range);
-    } else if (range.begin < pos->first.begin) {
-        // Leading empty space, infill
-        pos = action.Infill(accesses, pos, ResourceAccessRange(range.begin, pos->first.begin));
-    } else if (pos->first.begin < range.begin) {
-        // Trim the beginning if needed
-        pos = accesses->split(pos, range.begin, sparse_container::split_op_keep_both());
-        ++pos;
-    }
-
-    const auto the_end = accesses->end();
-    while ((pos != the_end) && pos->first.intersects(range)) {
-        if (pos->first.end > range.end) {
-            pos = accesses->split(pos, range.end, sparse_container::split_op_keep_both());
-        }
-
-        pos = action(accesses, pos);
-        if (pos == the_end) break;
-
-        auto next = pos;
-        ++next;
+struct ActionToOpsAdapter {
+    using Map = ResourceAccessRangeMap;
+    using Range = typename Map::key_type;
+    using Iterator = typename Map::iterator;
+    using IndexType = typename Map::index_type;
+
+    void infill(Map &accesses, const Iterator &pos, const Range &infill_range) const {
+        // Combine Infill and update operations to make the generic implementation simpler
+        Iterator infill = action.Infill(&accesses, pos, infill_range);
+        if (infill == accesses.end()) return;  // Allow action to 'pass' on filling in the blanks
+
+        // Need to apply the action to the Infill. 'infill_update_range' expects ops.infill to be completely done with
+        // the infill_range, whereas Action::Infill assumes the caller will apply the action() logic to the infill_range
+        for (; infill != pos; ++infill) {
+            assert(infill != accesses.end());
+            action(infill);
+        }
+    }
+    void update(const Iterator &pos) const { action(pos); }
+    const Action &action;
+};
 
-        // Do gap infill or infill to end of range, if needed.
-        if (pos->first.end < range.end) {
-            VkDeviceSize limit = (next == the_end) ? range.end : std::min(range.end, next->first.begin);
-            ResourceAccessRange new_range(pos->first.end, limit);
-            if (new_range.non_empty()) {
-                next = action.Infill(accesses, next, new_range);
-            }
-        }
-        pos = next;
-    }
+template <typename Action>
+void UpdateMemoryAccessState(ResourceAccessRangeMap *accesses, const ResourceAccessRange &range, const Action &action) {
+    ActionToOpsAdapter<Action> ops{action};
+    infill_update_range(*accesses, range, ops);
 }
 
 // Give a comparable interface for range generators and ranges
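
The InfillUpdateOps contract the adapter satisfies is only implied by the hunk above. As a mental model, here is a simplified sketch of the generic walk (illustrative only, not the repository's sparse_container implementation; the name infill_update_range_sketch is invented here, and splitting of entries that straddle the range boundaries is elided):

template <typename RangeMap, typename InfillUpdateOps>
void infill_update_range_sketch(RangeMap &map, const typename RangeMap::key_type &range, const InfillUpdateOps &ops) {
    using Range = typename RangeMap::key_type;
    if (range.empty()) return;
    auto pos = map.lower_bound(range);
    auto current = range.begin;
    while (current < range.end) {
        if (pos == map.end() || range.end <= pos->first.begin) {
            // Trailing gap: ops.infill must leave [current, range.end) completely updated
            ops.infill(map, pos, Range(current, range.end));
            break;
        }
        if (current < pos->first.begin) {
            // Interior gap before the next existing entry
            ops.infill(map, pos, Range(current, pos->first.begin));
        }
        ops.update(pos);  // Existing entry, assumed already trimmed to the range
        current = pos->first.end;
        ++pos;
    }
}

This is why ActionToOpsAdapter::infill runs the action over every infilled entry itself: the generic walk does not revisit a gap once ops.infill returns.
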
@@ -1726,10 +1714,9 @@ struct UpdateMemoryAccessStateFunctor {
         return accesses->lower_bound(range);
     }
 
-    Iterator operator()(ResourceAccessRangeMap *accesses, const Iterator &pos) const {
+    void operator()(const Iterator &pos) const {
         auto &access_state = pos->second;
         access_state.Update(usage, ordering_rule, tag);
-        return pos;
     }
 
     UpdateMemoryAccessStateFunctor(AccessAddressType type_, const AccessContext &context_, SyncStageAccessIndex usage_,
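
With the iterator-returning form gone, any functor passed to UpdateMemoryAccessState needs only an Infill member plus a void per-entry operator(). A hypothetical minimal action (not part of this change; ResourceUsageTag and ApplyPendingBarriers are taken from the surrounding file) showing both halves of the contract, including the "decline infill" path the adapter supports:

struct TouchExistingAction {
    using Iterator = ResourceAccessRangeMap::iterator;
    // Decline gap fill: returning end() makes ActionToOpsAdapter::infill 'pass'
    Iterator Infill(ResourceAccessRangeMap *accesses, const Iterator &, const ResourceAccessRange &) const {
        return accesses->end();
    }
    // New-style update: mutate the entry in place, no cursor to hand back
    void operator()(const Iterator &pos) const { pos->second.ApplyPendingBarriers(tag); }
    const ResourceUsageTag tag;
};
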
@@ -1801,7 +1788,7 @@ class ApplyBarrierOpsFunctor {
         return inserted;
     }
 
-    Iterator operator()(ResourceAccessRangeMap *accesses, const Iterator &pos) const {
+    void operator()(const Iterator &pos) const {
         auto &access_state = pos->second;
         for (const auto &op : barrier_ops_) {
             op(&access_state);
@@ -1812,7 +1799,6 @@ class ApplyBarrierOpsFunctor {
             // another walk
             access_state.ApplyPendingBarriers(tag_);
         }
-        return pos;
     }
 
     // A valid tag is required IFF layout_transition is true, as transitions are write ops
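
A note on the shape of these functor changes: the old operator() handed back an Iterator because UpdateMemoryAccessState's hand-rolled loop used the action's return value as its cursor; with infill_update_range owning the traversal, both functors reduce to in-place mutation. A rough call sketch (constructor arguments abbreviated, since the full signature falls outside these hunks, and the range values are illustrative):

ResourceAccessRangeMap accesses;
const ResourceAccessRange range(0, 4096);
UpdateMemoryAccessStateFunctor update_action(/* type, context, usage, ... */);
UpdateMemoryAccessState(&accesses, range, update_action);  // adapter + generic walk do the rest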