From e9f3d8dbd54c9c91aca477f263f3e93fd52b8326 Mon Sep 17 00:00:00 2001 From: Daniel Date: Tue, 14 Jan 2025 07:51:18 -0300 Subject: [PATCH] feat(prospective-parachains): implements `GetBackableCandidates` (#4374) Co-authored-by: EclesioMeloJunior --- .../prospective-parachains.go | 87 ++++- .../prospective_parachains_test.go | 358 ++++++++++++++++++ 2 files changed, 444 insertions(+), 1 deletion(-) create mode 100644 dot/parachain/prospective-parachains/prospective_parachains_test.go diff --git a/dot/parachain/prospective-parachains/prospective-parachains.go b/dot/parachain/prospective-parachains/prospective-parachains.go index effe9f02cb..489d80f3ef 100644 --- a/dot/parachain/prospective-parachains/prospective-parachains.go +++ b/dot/parachain/prospective-parachains/prospective-parachains.go @@ -6,12 +6,23 @@ import ( parachaintypes "github.com/ChainSafe/gossamer/dot/parachain/types" "github.com/ChainSafe/gossamer/internal/log" + "github.com/ChainSafe/gossamer/lib/common" ) var logger = log.NewFromGlobal(log.AddContext("pkg", "prospective_parachains"), log.SetLevel(log.Debug)) type ProspectiveParachains struct { SubsystemToOverseer chan<- any + View *view +} + +type view struct { + activeLeaves map[common.Hash]bool + perRelayParent map[common.Hash]*relayParentData +} + +type relayParentData struct { + fragmentChains map[parachaintypes.ParaID]*fragmentChain } // Name returns the name of the subsystem @@ -57,7 +68,7 @@ func (pp *ProspectiveParachains) processMessage(msg any) { case CandidateBacked: panic("not implemented yet: see issue #4309") case GetBackableCandidates: - panic("not implemented yet: see issue #4310") + pp.getBackableCandidates(msg) case GetHypotheticalMembership: panic("not implemented yet: see issue #4311") case GetMinimumRelayParents: @@ -80,3 +91,77 @@ func (*ProspectiveParachains) ProcessBlockFinalizedSignal(parachaintypes.BlockFi // NOTE: this subsystem does not process block finalized signal return nil } + +func (pp *ProspectiveParachains) getBackableCandidates( + msg GetBackableCandidates, +) { + // Extract details from the message + relayParentHash := msg.RelayParentHash + paraId := msg.ParaId + requestedQty := msg.RequestedQty + ancestors := msg.Ancestors + responseChan := msg.Response + + // Check if the relay parent is active + if _, exists := pp.View.activeLeaves[relayParentHash]; !exists { + logger.Debugf( + "Requested backable candidates for inactive relay-parent. "+ + "RelayParentHash: %v, ParaId: %v", + relayParentHash, paraId, + ) + responseChan <- []parachaintypes.CandidateHashAndRelayParent{} + return + } + + // Retrieve data for the relay parent + data, ok := pp.View.perRelayParent[relayParentHash] + if !ok { + logger.Debugf( + "Requested backable candidates for nonexistent relay-parent. "+ + "RelayParentHash: %v, ParaId: %v", + relayParentHash, paraId, + ) + responseChan <- []parachaintypes.CandidateHashAndRelayParent{} + return + } + + // Retrieve the fragment chain for the ParaID + chain, ok := data.fragmentChains[paraId] + if !ok { + logger.Debugf( + "Requested backable candidates for inactive ParaID. "+ + "RelayParentHash: %v, ParaId: %v", + relayParentHash, paraId, + ) + responseChan <- []parachaintypes.CandidateHashAndRelayParent{} + return + } + + // Retrieve backable candidates from the fragment chain + backableCandidates := chain.findBackableChain(ancestors, requestedQty) + if len(backableCandidates) == 0 { + logger.Debugf( + "No backable candidates found. 
RelayParentHash: %v, ParaId: %v, Ancestors: %v", + relayParentHash, paraId, ancestors, + ) + responseChan <- []parachaintypes.CandidateHashAndRelayParent{} + return + } + + logger.Debugf( + "Found backable candidates: %v. RelayParentHash: %v, ParaId: %v, Ancestors: %v", + backableCandidates, relayParentHash, paraId, ancestors, + ) + + // Convert backable candidates to the expected response format + candidateHashes := make([]parachaintypes.CandidateHashAndRelayParent, len(backableCandidates)) + for i, candidate := range backableCandidates { + candidateHashes[i] = parachaintypes.CandidateHashAndRelayParent{ + CandidateHash: candidate.candidateHash, + CandidateRelayParent: candidate.realyParentHash, + } + } + + // Send the result through the response channel + responseChan <- candidateHashes +} diff --git a/dot/parachain/prospective-parachains/prospective_parachains_test.go b/dot/parachain/prospective-parachains/prospective_parachains_test.go new file mode 100644 index 0000000000..b9d2d3b2c5 --- /dev/null +++ b/dot/parachain/prospective-parachains/prospective_parachains_test.go @@ -0,0 +1,358 @@ +package prospectiveparachains + +import ( + "bytes" + "testing" + + parachaintypes "github.com/ChainSafe/gossamer/dot/parachain/types" + "github.com/ChainSafe/gossamer/lib/common" + "github.com/stretchr/testify/assert" +) + +const MaxPoVSize = 1_000_000 + +func dummyPVD(parentHead parachaintypes.HeadData, relayParentNumber uint32) parachaintypes.PersistedValidationData { + return parachaintypes.PersistedValidationData{ + ParentHead: parentHead, + RelayParentNumber: relayParentNumber, + RelayParentStorageRoot: common.EmptyHash, + MaxPovSize: MaxPoVSize, + } +} + +func dummyCandidateReceiptBadSig( + relayParentHash common.Hash, + commitments *common.Hash, +) parachaintypes.CandidateReceipt { + var commitmentsHash common.Hash + + if commitments != nil { + commitmentsHash = *commitments + } else { + commitmentsHash = common.EmptyHash + } + + descriptor := parachaintypes.CandidateDescriptor{ + ParaID: parachaintypes.ParaID(0), + RelayParent: relayParentHash, + Collator: parachaintypes.CollatorID{}, + PovHash: common.EmptyHash, + ErasureRoot: common.EmptyHash, + Signature: parachaintypes.CollatorSignature{}, + ParaHead: common.EmptyHash, + ValidationCodeHash: parachaintypes.ValidationCodeHash{}, + PersistedValidationDataHash: common.EmptyHash, + } + + return parachaintypes.CandidateReceipt{ + CommitmentsHash: commitmentsHash, + Descriptor: descriptor, + } +} + +func makeCandidate( + relayParent common.Hash, + relayParentNumber uint32, + paraID parachaintypes.ParaID, + parentHead parachaintypes.HeadData, + headData parachaintypes.HeadData, + validationCodeHash parachaintypes.ValidationCodeHash, +) parachaintypes.CommittedCandidateReceipt { + pvd := dummyPVD(parentHead, relayParentNumber) + + commitments := parachaintypes.CandidateCommitments{ + HeadData: headData, + HorizontalMessages: []parachaintypes.OutboundHrmpMessage{}, + UpwardMessages: []parachaintypes.UpwardMessage{}, + NewValidationCode: nil, + ProcessedDownwardMessages: 0, + HrmpWatermark: relayParentNumber, + } + + commitmentsHash := commitments.Hash() + + candidate := dummyCandidateReceiptBadSig(relayParent, &commitmentsHash) + candidate.CommitmentsHash = commitmentsHash + candidate.Descriptor.ParaID = paraID + + pvdh, err := pvd.Hash() + + if err != nil { + panic(err) + } + + candidate.Descriptor.PersistedValidationDataHash = pvdh + candidate.Descriptor.ValidationCodeHash = validationCodeHash + + result := 
parachaintypes.CommittedCandidateReceipt{ + Descriptor: candidate.Descriptor, + Commitments: commitments, + } + + return result +} + +func TestGetBackableCandidates(t *testing.T) { + candidateRelayParent1 := common.Hash{0x01} + candidateRelayParent2 := common.Hash{0x02} + candidateRelayParent3 := common.Hash{0x03} + + paraId := parachaintypes.ParaID(1) + paraId2 := parachaintypes.ParaID(2) + paraId3 := parachaintypes.ParaID(3) + + parentHead1 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x01}, 32)} + parentHead2 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x02}, 32)} + parentHead3 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x03}, 32)} + + headData1 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x01}, 32)} + headData2 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x02}, 32)} + headData3 := parachaintypes.HeadData{Data: bytes.Repeat([]byte{0x03}, 32)} + + validationCodeHash := parachaintypes.ValidationCodeHash{} + + candidate1 := makeCandidate( + candidateRelayParent1, + uint32(10), + paraId, + parentHead1, + headData1, + validationCodeHash, + ) + + candidate2 := makeCandidate( + candidateRelayParent2, + uint32(9), + paraId2, + parentHead2, + headData2, + validationCodeHash, + ) + + candidate3 := makeCandidate( + candidateRelayParent3, + uint32(8), + paraId3, + parentHead3, + headData3, + validationCodeHash, + ) + + mockRelayParent := relayChainBlockInfo{ + Hash: candidateRelayParent1, + Number: 10, + } + + ancestors := []relayChainBlockInfo{ + { + Hash: candidateRelayParent2, + Number: 9, + }, + { + Hash: candidateRelayParent3, + Number: 8, + }, + } + + baseConstraints := ¶chaintypes.Constraints{ + MinRelayParentNumber: 8, + RequiredParent: parentHead1, + MaxPoVSize: MaxPoVSize, + } + + mockScope, err := newScopeWithAncestors(mockRelayParent, baseConstraints, nil, 10, ancestors) + assert.NoError(t, err) + + parentHash1, err := parentHead1.Hash() + assert.NoError(t, err) + + outputHash1, err := headData1.Hash() + assert.NoError(t, err) + + candidateStorage := newCandidateStorage() + err = candidateStorage.addCandidateEntry(&candidateEntry{ + candidateHash: parachaintypes.CandidateHash{Value: candidateRelayParent1}, + parentHeadDataHash: parentHash1, + outputHeadDataHash: outputHash1, + relayParent: candidateRelayParent1, + candidate: &prospectiveCandidate{ + Commitments: candidate1.Commitments, + PersistedValidationData: dummyPVD(parentHead1, 10), + PoVHash: candidate1.Descriptor.PovHash, + ValidationCodeHash: validationCodeHash, + }, + state: backed, + }) + assert.NoError(t, err) + + parentHash2, err := parentHead2.Hash() + assert.NoError(t, err) + + outputHash2, err := headData2.Hash() + assert.NoError(t, err) + + err = candidateStorage.addCandidateEntry(&candidateEntry{ + candidateHash: parachaintypes.CandidateHash{Value: candidateRelayParent2}, + parentHeadDataHash: parentHash2, + outputHeadDataHash: outputHash2, + relayParent: candidateRelayParent2, + candidate: &prospectiveCandidate{ + Commitments: candidate2.Commitments, + PersistedValidationData: dummyPVD(parentHead2, 9), + PoVHash: candidate2.Descriptor.PovHash, + ValidationCodeHash: validationCodeHash, + }, + state: backed, + }) + assert.NoError(t, err) + + parentHash3, err := parentHead3.Hash() + assert.NoError(t, err) + + outputHash3, err := headData3.Hash() + assert.NoError(t, err) + + err = candidateStorage.addCandidateEntry(&candidateEntry{ + candidateHash: parachaintypes.CandidateHash{Value: candidateRelayParent3}, + parentHeadDataHash: parentHash3, + outputHeadDataHash: outputHash3, + 
relayParent: candidateRelayParent3, + candidate: &prospectiveCandidate{ + Commitments: candidate3.Commitments, + PersistedValidationData: dummyPVD(parentHead3, 8), + PoVHash: candidate3.Descriptor.PovHash, + ValidationCodeHash: validationCodeHash, + }, + state: backed, + }) + assert.NoError(t, err) + + type testCase struct { + name string + view *view + msg GetBackableCandidates + expectedLength int + } + + cases := []testCase{ + { + name: "relay_parent_inactive", + view: &view{ + activeLeaves: map[common.Hash]bool{}, + perRelayParent: map[common.Hash]*relayParentData{}, + }, + msg: GetBackableCandidates{ + RelayParentHash: candidateRelayParent1, + ParaId: parachaintypes.ParaID(1), + RequestedQty: 1, + Ancestors: Ancestors{}, + Response: make(chan []parachaintypes.CandidateHashAndRelayParent, 1), + }, + expectedLength: 0, + }, + { + name: "active_leaves_empty", + view: &view{ + activeLeaves: map[common.Hash]bool{}, + perRelayParent: map[common.Hash]*relayParentData{ + candidateRelayParent1: { + fragmentChains: map[parachaintypes.ParaID]*fragmentChain{}, + }, + }, + }, + msg: GetBackableCandidates{ + RelayParentHash: candidateRelayParent1, + ParaId: parachaintypes.ParaID(1), + RequestedQty: 1, + Ancestors: Ancestors{}, + Response: make(chan []parachaintypes.CandidateHashAndRelayParent, 1), + }, + expectedLength: 0, + }, + { + name: "no_candidates_found", + view: &view{ + activeLeaves: map[common.Hash]bool{ + candidateRelayParent1: true, + }, + perRelayParent: map[common.Hash]*relayParentData{ + candidateRelayParent1: { + fragmentChains: map[parachaintypes.ParaID]*fragmentChain{}, + }, + }, + }, + msg: GetBackableCandidates{ + RelayParentHash: candidateRelayParent1, + ParaId: parachaintypes.ParaID(1), + RequestedQty: 1, + Ancestors: Ancestors{}, + Response: make(chan []parachaintypes.CandidateHashAndRelayParent, 1), + }, + expectedLength: 0, + }, + { + name: "not_found_parachain_based_on_paraid", + view: &view{ + activeLeaves: map[common.Hash]bool{ + candidateRelayParent1: true, + }, + perRelayParent: map[common.Hash]*relayParentData{ + candidateRelayParent1: { + fragmentChains: map[parachaintypes.ParaID]*fragmentChain{}, + }, + }, + }, + msg: GetBackableCandidates{ + RelayParentHash: candidateRelayParent1, + ParaId: parachaintypes.ParaID(3), + RequestedQty: 1, + Ancestors: Ancestors{}, + Response: make(chan []parachaintypes.CandidateHashAndRelayParent, 1), + }, + expectedLength: 0, + }, + { + name: "candidates_found", + view: &view{ + activeLeaves: map[common.Hash]bool{ + candidateRelayParent1: true, + }, + perRelayParent: map[common.Hash]*relayParentData{ + candidateRelayParent1: { + fragmentChains: map[parachaintypes.ParaID]*fragmentChain{ + parachaintypes.ParaID(1): newFragmentChain(mockScope, candidateStorage), + }, + }, + }, + }, + msg: GetBackableCandidates{ + RelayParentHash: candidateRelayParent1, + ParaId: parachaintypes.ParaID(1), + RequestedQty: 2, + Ancestors: Ancestors{ + parachaintypes.CandidateHash{Value: candidateRelayParent2}: struct{}{}, + parachaintypes.CandidateHash{Value: candidateRelayParent3}: struct{}{}, + }, + Response: make(chan []parachaintypes.CandidateHashAndRelayParent, 1), + }, + expectedLength: 1, + }, + } + + for _, tc := range cases { + t.Run(tc.name, func(t *testing.T) { + pp := &ProspectiveParachains{ + View: tc.view, + } + + pp.getBackableCandidates(tc.msg) + + select { + case result := <-tc.msg.Response: + assert.Equal(t, tc.expectedLength, len(result), "Unexpected number of candidates") + default: + t.Fatal("No response received from 
getBackableCandidates") + } + }) + } +}