|
// Per-chain progress snapshot carried alongside a popped batch.
// NOTE(review): field meanings inferred from names — none are read in this
// file; confirm semantics against the batch consumer before relying on them.
type progressedChain = {
  chainId: int,
  // presumably the number of batch items taken from this chain — verify with caller
  batchSize: int,
  // presumably the block number this chain's progress advanced to — verify with caller
  progressBlockNumber: int,
  // presumably the log index of the next unprocessed item at that block, if any
  progressNextBlockLogIndex: option<int>,
  totalEventsProcessed: int,
}
| 8 | + |
// A batch of event items popped off the per-chain queues, together with the
// state needed to record how far each chain progressed.
type t = {
  // Event items in the order they were popped.
  items: array<Internal.eventItem>,
  // One progress record per chain that contributed to (or was advanced by) the batch.
  progressedChains: array<progressedChain>,
  // Per-chain fetch state map.
  fetchStates: ChainMap.t<FetchState.t>,
  // NOTE(review): presumably dynamic contracts to persist, keyed by chain id
  // (stringified) — inferred from the name; confirm against the writer.
  dcsToStoreByChainId: dict<array<FetchState.indexingContract>>,
}
| 15 | + |
// Pairs a chain with its earliest queue item so candidates from different
// chains can be compared for global (cross-chain) event ordering.
type multiChainEventComparitor = {
  chain: ChainMap.Chain.t,
  earliestEvent: FetchState.queueItem,
}
| 20 | + |
// Builds the sortable comparison key (timestamp, chainId, blockNumber, logIndex)
// for a concrete event item via EventUtils.
let getComparitorFromItem = (queueItem: Internal.eventItem) =>
  EventUtils.getEventComparator({
    timestamp: queueItem.timestamp,
    chainId: queueItem.chain->ChainMap.Chain.toChainId,
    blockNumber: queueItem.blockNumber,
    logIndex: queueItem.logIndex,
  })
| 30 | + |
// Comparison key for a queue item. A chain with no queued item (NoItem) is
// keyed by its latest fetched block with logIndex 0, so it still participates
// in ordering against chains that do have items.
let getQueueItemComparitor = (earliestQueueItem: FetchState.queueItem, ~chain) =>
  switch earliestQueueItem {
  | NoItem({latestFetchedBlock}) => (
      latestFetchedBlock.blockTimestamp,
      chain->ChainMap.Chain.toChainId,
      latestFetchedBlock.blockNumber,
      0,
    )
  | Item({item}) => getComparitorFromItem(item)
  }
| 42 | + |
// True when `a`'s earliest event sorts strictly before `b`'s, using the
// (timestamp, chainId, blockNumber, logIndex) tuple ordering.
let isQueueItemEarlier = (a: multiChainEventComparitor, b: multiChainEventComparitor): bool => {
  let keyA = a.earliestEvent->getQueueItemComparitor(~chain=a.chain)
  let keyB = b.earliestEvent->getQueueItemComparitor(~chain=b.chain)
  keyA < keyB
}
| 47 | + |
/**
 It either returns the earliest item among all chains, or None if no chains are actively indexing
 */
let getOrderedNextItem = (fetchStates: ChainMap.t<FetchState.t>): option<
  multiChainEventComparitor,
> => {
  let earliest = ref(None)
  fetchStates
  ->ChainMap.entries
  ->Belt.Array.forEach(((chain, fetchState)) => {
    // Chains that reached their end block are skipped entirely
    if fetchState->FetchState.isActivelyIndexing {
      let candidate: multiChainEventComparitor = {
        chain,
        earliestEvent: fetchState->FetchState.getEarliestEvent,
      }
      switch earliest.contents {
      | Some(best) if isQueueItemEarlier(best, candidate) => () // keep current best
      | _ => earliest := Some(candidate)
      }
    }
  })
  earliest.contents
}
| 70 | + |
// Pops up to ~maxBatchSize items in strict cross-chain order: each iteration
// takes the globally-earliest item across all chains. Mutates ~sizePerChain
// with a per-chain count of popped items. Returns the popped items.
let popOrderedBatchItems = (
  ~maxBatchSize,
  ~fetchStates: ChainMap.t<FetchState.t>,
  ~sizePerChain: dict<int>,
) => {
  let items = []

  let keepGoing = ref(true)
  while keepGoing.contents && items->Array.length < maxBatchSize {
    switch fetchStates->getOrderedNextItem {
    | Some({earliestEvent: Item({item, popItemOffQueue})}) =>
      popItemOffQueue()
      items->Js.Array2.push(item)->ignore
      sizePerChain->Utils.Dict.incrementByInt(item.chain->ChainMap.Chain.toChainId)
    // Stop when nothing is actively indexing or the earliest "event" is a
    // NoItem placeholder (queue drained up to the fetched block).
    | Some({earliestEvent: NoItem(_)})
    | None =>
      keepGoing := false
    }
  }

  items
}
| 98 | + |
// Pops up to ~maxBatchSize items without cross-chain ordering: chains are
// visited one at a time (in FetchState.filterAndSortForUnorderedBatch order)
// and each chain is drained as far as possible before moving on, so that
// consecutive items share a chain more often and loader optimisations hit
// more frequently. Mutates ~sizePerChain with per-chain popped counts.
let popUnorderedBatchItems = (
  ~maxBatchSize,
  ~fetchStates: ChainMap.t<FetchState.t>,
  ~sizePerChain: dict<int>,
) => {
  let items = []

  let candidates =
    fetchStates
    ->ChainMap.values
    ->FetchState.filterAndSortForUnorderedBatch(~maxBatchSize)
  let candidateCount = candidates->Array.length

  let chainIdx = ref(0)
  let collected = ref(0)

  while collected.contents < maxBatchSize && chainIdx.contents < candidateCount {
    let fetchState = candidates->Js.Array2.unsafe_get(chainIdx.contents)
    let countBefore = collected.contents

    // Drain this chain until its queue yields NoItem or the batch is full.
    let draining = ref(true)
    while draining.contents && collected.contents < maxBatchSize {
      switch fetchState->FetchState.getEarliestEvent {
      | Item({item, popItemOffQueue}) =>
        popItemOffQueue()
        items->Js.Array2.push(item)->ignore
        collected := collected.contents + 1
      | NoItem(_) => draining := false
      }
    }

    // Record how many items this chain contributed, if any.
    let poppedForChain = collected.contents - countBefore
    if poppedForChain > 0 {
      sizePerChain->Utils.Dict.setByInt(fetchState.chainId, poppedForChain)
    }

    chainIdx := chainIdx.contents + 1
  }

  items
}
0 commit comments