diff --git a/apps/arweave/src/ar_data_sync.erl b/apps/arweave/src/ar_data_sync.erl index fc3bd066b..1f16bb23c 100644 --- a/apps/arweave/src/ar_data_sync.erl +++ b/apps/arweave/src/ar_data_sync.erl @@ -926,20 +926,10 @@ handle_cast({store_fetched_chunk, Peer, Time, TransferSize, Byte, Proof} = Cast, {BlockStartOffset, BlockEndOffset, TXRoot} = ar_block_index:get_block_bounds(SeekByte), BlockSize = BlockEndOffset - BlockStartOffset, Offset = SeekByte - BlockStartOffset, - ValidateDataPathFun = - case BlockStartOffset >= get_merkle_rebase_threshold() of - true -> - fun ar_merkle:validate_path2/4; - false -> - case BlockStartOffset >= ?STRICT_DATA_SPLIT_THRESHOLD of - true -> - fun ar_merkle:validate_path_strict_data_split/4; - false -> - fun ar_merkle:validate_path_strict_borders/4 - end - end, + ValidateDataPathRuleset = ar_poa:get_data_path_validation_ruleset(BlockStartOffset, + get_merkle_rebase_threshold()), case validate_proof(TXRoot, BlockStartOffset, Offset, BlockSize, Proof, - ValidateDataPathFun) of + ValidateDataPathRuleset) of {need_unpacking, AbsoluteOffset, ChunkArgs, VArgs} -> {Packing, DataRoot, TXStartOffset, ChunkEndOffset, TXSize, ChunkID} = VArgs, AbsoluteTXStartOffset = BlockStartOffset + TXStartOffset, @@ -1518,22 +1508,12 @@ validate_served_chunk(Args) -> false -> case ar_block_index:get_block_bounds(Offset - 1) of {BlockStart, BlockEnd, TXRoot} -> - ValidateDataPathFun = - case BlockStart >= get_merkle_rebase_threshold() of - true -> - fun ar_merkle:validate_path2/4; - false -> - case BlockStart >= ?STRICT_DATA_SPLIT_THRESHOLD of - true -> - fun ar_merkle:validate_path_strict_data_split/4; - false -> - fun ar_merkle:validate_path_strict_borders/4 - end - end, + ValidateDataPathRuleset = ar_poa:get_data_path_validation_ruleset( + BlockStart, get_merkle_rebase_threshold()), BlockSize = BlockEnd - BlockStart, ChunkOffset = Offset - BlockStart - 1, case validate_proof2({TXRoot, ChunkOffset, BlockSize, DataPath, TXPath, - ChunkSize, 
ValidateDataPathFun}) of + ChunkSize, ValidateDataPathRuleset}) of {true, ChunkID} -> {true, ChunkID}; false -> @@ -2383,7 +2363,7 @@ enqueue_peer_range(Peer, RangeStart, RangeEnd, ChunkOffsets, {Q, QIntervals}) -> QIntervals2 = ar_intervals:add(QIntervals, RangeEnd, RangeStart), {Q2, QIntervals2}. -validate_proof(TXRoot, BlockStartOffset, Offset, BlockSize, Proof, ValidateDataPathFun) -> +validate_proof(TXRoot, BlockStartOffset, Offset, BlockSize, Proof, ValidateDataPathRuleset) -> #{ data_path := DataPath, tx_path := TXPath, chunk := Chunk, packing := Packing } = Proof, case ar_merkle:validate_path(TXRoot, Offset, BlockSize, TXPath) of false -> @@ -2391,7 +2371,8 @@ validate_proof(TXRoot, BlockStartOffset, Offset, BlockSize, Proof, ValidateDataP {DataRoot, TXStartOffset, TXEndOffset} -> TXSize = TXEndOffset - TXStartOffset, ChunkOffset = Offset - TXStartOffset, - case ValidateDataPathFun(DataRoot, ChunkOffset, TXSize, DataPath) of + case ar_merkle:validate_path(DataRoot, ChunkOffset, TXSize, DataPath, + ValidateDataPathRuleset) of false -> false; {ChunkID, ChunkStartOffset, ChunkEndOffset} -> @@ -2421,14 +2402,15 @@ validate_proof(TXRoot, BlockStartOffset, Offset, BlockSize, Proof, ValidateDataP end. validate_proof2(Args) -> - {TXRoot, Offset, BlockSize, DataPath, TXPath, ChunkSize, ValidateDataPathFun} = Args, + {TXRoot, Offset, BlockSize, DataPath, TXPath, ChunkSize, ValidateDataPathRuleset} = Args, case ar_merkle:validate_path(TXRoot, Offset, BlockSize, TXPath) of false -> false; {DataRoot, TXStartOffset, TXEndOffset} -> TXSize = TXEndOffset - TXStartOffset, ChunkOffset = Offset - TXStartOffset, - case ValidateDataPathFun(DataRoot, ChunkOffset, TXSize, DataPath) of + case ar_merkle:validate_path(DataRoot, ChunkOffset, TXSize, DataPath, + ValidateDataPathRuleset) of {ChunkID, ChunkStartOffset, ChunkEndOffset} -> case ChunkEndOffset - ChunkStartOffset == ChunkSize of false -> @@ -2442,19 +2424,21 @@ validate_proof2(Args) -> end. 
validate_data_path(DataRoot, Offset, TXSize, DataPath, Chunk) -> - Base = ar_merkle:validate_path_strict_borders(DataRoot, Offset, TXSize, DataPath), - Strict = ar_merkle:validate_path_strict_data_split(DataRoot, Offset, TXSize, DataPath), - Rebase = ar_merkle:validate_path2(DataRoot, Offset, TXSize, DataPath), + Base = ar_merkle:validate_path(DataRoot, Offset, TXSize, DataPath, strict_borders_ruleset), + Strict = ar_merkle:validate_path(DataRoot, Offset, TXSize, DataPath, + strict_data_split_ruleset), + Rebase = ar_merkle:validate_path(DataRoot, Offset, TXSize, DataPath, + offset_rebase_support_ruleset), Result = case {Base, Strict, Rebase} of {false, false, false} -> false; - {_, {_, _, _} = R, _} -> - R; - {_, _, {_, _, _} = R} -> - R; - {{_, _, _} = R, _, _} -> - R + {_, {_, _, _} = StrictResult, _} -> + StrictResult; + {_, _, {_, _, _} = RebaseResult} -> + RebaseResult; + {{_, _, _} = BaseResult, _, _} -> + BaseResult end, case Result of false -> @@ -2966,13 +2950,17 @@ process_disk_pool_chunk_offset(Iterator, TXRoot, TXPath, AbsoluteOffset, MayConc case {AbsoluteOffset >= get_merkle_rebase_threshold(), AbsoluteOffset >= ?STRICT_DATA_SPLIT_THRESHOLD, PassedBase, PassedStrictValidation, PassedRebaseValidation} of + %% At the rebase threshold we relax some of the validation rules so the strict + %% validation may fail. {true, true, _, _, true} -> true; - {false, true, true, true, true} -> - true; - {false, false, true, false, false} -> + %% Between the "strict" and "rebase" thresholds the "base" and "strict split" + %% rules must be followed. + {false, true, true, true, _} -> true; - {false, false, true, true, true} -> + %% Before the strict threshold only the base (most relaxed) validation must + %% pass. 
+ {false, false, true, _, _} -> true; _ -> false @@ -3056,8 +3044,8 @@ process_disk_pool_matured_chunk_offset(Iterator, TXRoot, TXPath, AbsoluteOffset, %% The other modules will either sync the chunk themselves or copy it over from the %% other module the next time the node is restarted. #sync_data_state{ chunk_data_db = ChunkDataDB, store_id = DefaultStoreID } = State, - {Offset, _, ChunkSize, DataRoot, DataPathHash, ChunkDataKey, Key, _BaseSplit, _StrictSplit, - _RebaseSupportSplit} = Args, + {Offset, _, ChunkSize, DataRoot, DataPathHash, ChunkDataKey, Key, _PassedBaseValidation, + _PassedStrictValidation, _PassedRebaseValidation} = Args, FindStorageModule = case find_storage_module_for_disk_pool_chunk(AbsoluteOffset) of not_found -> diff --git a/apps/arweave/src/ar_merkle.erl b/apps/arweave/src/ar_merkle.erl index 12251e1c3..fd4becc70 100644 --- a/apps/arweave/src/ar_merkle.erl +++ b/apps/arweave/src/ar_merkle.erl @@ -2,13 +2,13 @@ %%% as verification of those proofs. -module(ar_merkle). --export([generate_tree/1, generate_path/3, validate_path/4, - validate_path_strict_borders/4, validate_path_strict_data_split/4, - validate_path2/4, extract_note/1, extract_root/1]). +-export([generate_tree/1, generate_path/3, validate_path/4, validate_path/5, + extract_note/1, extract_root/1]). -export([get/2, hash/1, note_to_binary/1]). -include_lib("arweave/include/ar.hrl"). +-include_lib("arweave/include/ar_consensus.hrl"). -include_lib("eunit/include/eunit.hrl"). %%% @doc Generates annotated merkle trees, paths inside those trees, as well @@ -42,25 +42,32 @@ generate_path(ID, Dest, Tree) -> binary:list_to_bin(generate_path_parts(ID, Dest, Tree, 0)). %% @doc Validate the given merkle path. -validate_path(ID, Dest, RightBound, _Path) when RightBound =< 0 -> - ?LOG_ERROR([{event, validate_path_called_with_not_positive_right_bound}, +validate_path(ID, Dest, RightBound, Path) -> + validate_path(ID, Dest, RightBound, Path, basic_ruleset). 
+ +%% @doc Validate the given merkle path using the given set of rules. +validate_path(ID, Dest, RightBound, _Path, _Ruleset) when RightBound =< 0 -> + ?LOG_ERROR([{event, validate_path_called_with_non_positive_right_bound}, {root, ar_util:encode(ID)}, {dest, Dest}, {right_bound, RightBound}]), throw(invalid_right_bound); -validate_path(ID, Dest, RightBound, Path) when Dest >= RightBound -> - validate_path(ID, RightBound - 1, RightBound, Path); -validate_path(ID, Dest, RightBound, Path) when Dest < 0 -> - validate_path(ID, 0, RightBound, Path); -validate_path(ID, Dest, RightBound, Path) -> - validate_path(ID, Dest, 0, RightBound, Path). +validate_path(ID, Dest, RightBound, Path, Ruleset) when Dest >= RightBound -> + validate_path(ID, RightBound - 1, RightBound, Path, Ruleset); +validate_path(ID, Dest, RightBound, Path, Ruleset) when Dest < 0 -> + validate_path(ID, 0, RightBound, Path, Ruleset); +validate_path(ID, Dest, RightBound, Path, Ruleset) -> + validate_path(ID, Dest, 0, RightBound, Path, Ruleset). 
validate_path(ID, _Dest, LeftBound, RightBound, - << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>) -> + << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, basic_ruleset) -> case hash([hash(Data), hash(note_to_binary(EndOffset))]) of - ID -> {Data, LeftBound, max(min(RightBound, EndOffset), LeftBound + 1)}; - _ -> false + ID -> + {Data, LeftBound, max(min(RightBound, EndOffset), LeftBound + 1)}; + _ -> + false end; validate_path(ID, Dest, LeftBound, RightBound, - << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>) -> + << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>, + basic_ruleset) -> case hash([hash(L), hash(R), hash(note_to_binary(Note))]) of ID -> {Path, NextLeftBound, NextRightBound} = @@ -70,28 +77,17 @@ validate_path(ID, Dest, LeftBound, RightBound, false -> {R, max(LeftBound, Note), RightBound} end, - validate_path(Path, Dest, NextLeftBound, NextRightBound, Rest); + validate_path(Path, Dest, NextLeftBound, NextRightBound, Rest, basic_ruleset); _ -> false end; -validate_path(_, _, _, _, _) -> - false. +validate_path(_, _, _, _, _, basic_ruleset) -> + false; -%% @doc Validate the given merkle path and ensure every offset does not +%% Validate the given merkle path and ensure every offset does not %% exceed the previous offset by more than ?DATA_CHUNK_SIZE. 
-validate_path_strict_borders(ID, Dest, RightBound, _Path) when RightBound =< 0 -> - ?LOG_ERROR([{event, validate_path_strict_borders_called_with_not_positive_right_bound}, - {root, ar_util:encode(ID)}, {dest, Dest}, {right_bound, RightBound}]), - throw(invalid_right_bound); -validate_path_strict_borders(ID, Dest, RightBound, Path) when Dest >= RightBound -> - validate_path_strict_borders(ID, RightBound - 1, RightBound, Path); -validate_path_strict_borders(ID, Dest, RightBound, Path) when Dest < 0 -> - validate_path_strict_borders(ID, 0, RightBound, Path); -validate_path_strict_borders(ID, Dest, RightBound, Path) -> - validate_path_strict_borders(ID, Dest, 0, RightBound, Path). - -validate_path_strict_borders(ID, _Dest, LeftBound, RightBound, - << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>) -> +validate_path(ID, _Dest, LeftBound, RightBound, + << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, strict_borders_ruleset) -> case EndOffset - LeftBound > ?DATA_CHUNK_SIZE orelse RightBound - LeftBound > ?DATA_CHUNK_SIZE of true -> @@ -104,8 +100,9 @@ validate_path_strict_borders(ID, _Dest, LeftBound, RightBound, false end end; -validate_path_strict_borders(ID, Dest, LeftBound, RightBound, - << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>) -> +validate_path(ID, Dest, LeftBound, RightBound, + << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>, + strict_borders_ruleset) -> case hash([hash(L), hash(R), hash(note_to_binary(Note))]) of ID -> {Path, NextLeftBound, NextRightBound} = @@ -115,40 +112,34 @@ validate_path_strict_borders(ID, Dest, LeftBound, RightBound, false -> {R, max(LeftBound, Note), RightBound} end, - validate_path_strict_borders(Path, Dest, NextLeftBound, NextRightBound, Rest); + validate_path(Path, Dest, NextLeftBound, NextRightBound, Rest, + strict_borders_ruleset); _ -> false end; -validate_path_strict_borders(_ID, _Dest, _LeftBound, _RightBound, _Path) -> - false. 
+validate_path(_, _, _, _, _, strict_borders_ruleset) -> + false; -%% @doc Validate the given merkle path and ensure every offset does not +%% Validate the given merkle path and ensure every offset does not %% exceed the previous offset by more than ?DATA_CHUNK_SIZE. Additionally, %% reject chunks smaller than 256 KiB unless they are the last or the only chunks %% of their datasets or the second last chunks which do not exceed 256 KiB when %% combined with the following (last) chunks. Finally, reject chunks smaller than %% their Merkle proofs unless they are the last chunks of their datasets. -validate_path_strict_data_split(ID, Dest, RightBound, _Path) when RightBound =< 0 -> - ?LOG_ERROR([{event, validate_path_called_with_not_positive_right_bound}, - {root, ar_util:encode(ID)}, {dest, Dest}, {right_bound, RightBound}]), - throw(invalid_right_bound); -validate_path_strict_data_split(ID, Dest, RightBound, Path) when Dest >= RightBound -> - validate_path_strict_data_split(ID, RightBound - 1, RightBound, Path); -validate_path_strict_data_split(ID, Dest, RightBound, Path) when Dest < 0 -> - validate_path_strict_data_split(ID, 0, RightBound, Path); -validate_path_strict_data_split(ID, Dest, RightBound, Path) -> - validate_path_strict_data_split(ID, Dest, 0, RightBound, Path, byte_size(Path), - RightBound). 
- -validate_path_strict_data_split(ID, _Dest, LeftBound, RightBound, - << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, PathSize, DataSize) -> +validate_path(ID, Dest, LeftBound, RightBound, Path, strict_data_split_ruleset) -> + validate_path(ID, Dest, LeftBound, RightBound, Path, {strict_data_split_ruleset, + byte_size(Path), RightBound}); + +validate_path(ID, _Dest, LeftBound, RightBound, + << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, + {strict_data_split_ruleset, PathSize, DataSize}) -> case EndOffset - LeftBound > ?DATA_CHUNK_SIZE orelse RightBound - LeftBound > ?DATA_CHUNK_SIZE of true -> false; false -> ChunkSize = EndOffset - LeftBound, - ValidateSplit = + IsSplitValid = case validate_strict_split of _ when ChunkSize == (?DATA_CHUNK_SIZE) -> LeftBound rem (?DATA_CHUNK_SIZE) == 0; @@ -163,7 +154,7 @@ validate_path_strict_data_split(ID, _Dest, LeftBound, RightBound, andalso DataSize - LeftBound > (?DATA_CHUNK_SIZE) andalso DataSize - LeftBound < 2 * (?DATA_CHUNK_SIZE) end, - case ValidateSplit of + case IsSplitValid of false -> false; true -> @@ -175,9 +166,9 @@ validate_path_strict_data_split(ID, _Dest, LeftBound, RightBound, end end end; -validate_path_strict_data_split(ID, Dest, LeftBound, RightBound, +validate_path(ID, Dest, LeftBound, RightBound, << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>, - PathSize, DataSize) -> + {strict_data_split_ruleset, PathSize, DataSize}) -> case hash([hash(L), hash(R), hash(note_to_binary(Note))]) of ID -> {Path, NextLeftBound, NextRightBound} = @@ -187,30 +178,22 @@ validate_path_strict_data_split(ID, Dest, LeftBound, RightBound, false -> {R, max(LeftBound, Note), RightBound} end, - validate_path_strict_data_split(Path, Dest, NextLeftBound, NextRightBound, Rest, - PathSize, DataSize); + validate_path(Path, Dest, NextLeftBound, NextRightBound, Rest, + {strict_data_split_ruleset, PathSize, DataSize}); _ -> false end; -validate_path_strict_data_split(_ID, _Dest, _LeftBound, 
_RightBound, _Path, _PathSize, - _DataSize) -> - false. +validate_path(_, _, _, _, _, {strict_data_split_ruleset, _, _}) -> + false; -%% @doc Validate the given merkle path where any subtrees may have 0-based offset. -validate_path2(ID, Dest, RightBound, _Path) when RightBound =< 0 -> - ?LOG_ERROR([{event, validate_path2_called_with_not_positive_right_bound}, - {root, ar_util:encode(ID)}, {dest, Dest}, {right_bound, RightBound}]), - throw(invalid_right_bound); -validate_path2(ID, Dest, RightBound, Path) when Dest >= RightBound -> - validate_path2(ID, RightBound - 1, RightBound, Path); -validate_path2(ID, Dest, RightBound, Path) when Dest < 0 -> - validate_path2(ID, 0, RightBound, Path); -validate_path2(ID, Dest, RightBound, Path) -> - validate_path2(ID, Dest, 0, RightBound, Path, byte_size(Path), RightBound, 0). - -validate_path2(ID, _Dest, LeftBound, RightBound, - << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, PathSize, DataSize, - LeftBoundShift) -> +%% Validate the given merkle path where any subtrees may have 0-based offset. 
+validate_path(ID, Dest, LeftBound, RightBound, Path, offset_rebase_support_ruleset) -> + validate_path(ID, Dest, LeftBound, RightBound, Path, + {offset_rebase_support_ruleset, undefined, 0}); + +validate_path(ID, _Dest, LeftBound, RightBound, + << Data:?HASH_SIZE/binary, EndOffset:(?NOTE_SIZE*8) >>, + {offset_rebase_support_ruleset, IsRightMostInItsSubTree, LeftBoundShift}) -> case EndOffset - LeftBound > ?DATA_CHUNK_SIZE orelse RightBound - LeftBound > ?DATA_CHUNK_SIZE of true -> @@ -218,25 +201,23 @@ validate_path2(ID, _Dest, LeftBound, RightBound, false; false -> ChunkSize = EndOffset - LeftBound, - ValidateSplit = - case validate_strict_split of - _ when ChunkSize == (?DATA_CHUNK_SIZE) -> - LeftBound rem (?DATA_CHUNK_SIZE) == 0; - _ when EndOffset == DataSize -> - Border = RightBound - RightBound rem (?DATA_CHUNK_SIZE), - RightBound rem (?DATA_CHUNK_SIZE) > 0 - andalso LeftBound =< Border; - _ when PathSize > ChunkSize -> - false; + IsSplitValid = + case IsRightMostInItsSubTree of + true -> + %% The last chunk may either start at the bucket start or + %% span two buckets. + Bucket0 = LeftBound div (?DATA_CHUNK_SIZE), + Bucket1 = EndOffset div (?DATA_CHUNK_SIZE), + (LeftBound rem (?DATA_CHUNK_SIZE) == 0) orelse Bucket0 + 1 == Bucket1; _ -> + %% May also be the only chunk of a single-chunk subtree. 
LeftBound rem (?DATA_CHUNK_SIZE) == 0 - andalso DataSize - LeftBound > (?DATA_CHUNK_SIZE) - andalso DataSize - LeftBound < 2 * (?DATA_CHUNK_SIZE) end, - case ValidateSplit of + case IsSplitValid of false -> - io:format("Split: E=~B L=~B R=~B C=~B~n", - [EndOffset, LeftBound, RightBound, ChunkSize]), % TODO + io:format("Split: E=~B L=~B R=~B C=~B ~p~n", + [EndOffset, LeftBound, RightBound, ChunkSize, + IsRightMostInItsSubTree]), % TODO false; true -> case hash([hash(Data), hash(note_to_binary(EndOffset))]) of @@ -252,10 +233,10 @@ validate_path2(ID, _Dest, LeftBound, RightBound, end end end; -validate_path2(ID, Dest, LeftBound, RightBound, +validate_path(ID, Dest, LeftBound, RightBound, << 0:(?HASH_SIZE*8), L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, - Note:(?NOTE_SIZE*8), Rest/binary >>, PathSize, DataSize, - LeftBoundShift) -> + Note:(?NOTE_SIZE*8), Rest/binary >>, + {offset_rebase_support_ruleset, _IsRightMostInItsSubTree, LeftBoundShift}) -> case hash([hash(L), hash(R), hash(note_to_binary(Note))]) of ID -> {Path, NextLeftBound, NextRightBound, Dest2, NextLeftBoundShift} = @@ -272,34 +253,37 @@ validate_path2(ID, Dest, LeftBound, RightBound, end, io:format("Passing: E=~B L=~B R=~B Shift: ~B NextShift: ~B~n", [Dest, LeftBound, RightBound, LeftBoundShift, NextLeftBoundShift]), % TODO - validate_path2(Path, Dest2, NextLeftBound, NextRightBound, Rest, - PathSize, DataSize, NextLeftBoundShift); + validate_path(Path, Dest2, NextLeftBound, NextRightBound, Rest, + {offset_rebase_support_ruleset, + %% IsRightMostInItsSubTree=undefined because we step + %% into the rebased tree. 
+ undefined, + NextLeftBoundShift}); _ -> false end; -validate_path2(ID, Dest, LeftBound, RightBound, +validate_path(ID, Dest, LeftBound, RightBound, << L:?HASH_SIZE/binary, R:?HASH_SIZE/binary, Note:(?NOTE_SIZE*8), Rest/binary >>, - PathSize, DataSize, LeftBoundShift) -> + {offset_rebase_support_ruleset, IsRightMostInItsSubTree, LeftBoundShift}) -> case hash([hash(L), hash(R), hash(note_to_binary(Note))]) of ID -> io:format("Passing no rebase: E=~B L=~B R=~B~n", [Dest, LeftBound, RightBound]), % TODO - {Path, NextLeftBound, NextRightBound} = + {Path, NextLeftBound, NextRightBound, IsRightMostInItsSubTree2} = case Dest < Note of true -> - {L, LeftBound, min(RightBound, Note)}; + {L, LeftBound, min(RightBound, Note), false}; false -> - {R, max(LeftBound, Note), RightBound} + {R, max(LeftBound, Note), RightBound, + case IsRightMostInItsSubTree of undefined -> true; + _ -> IsRightMostInItsSubTree end} end, - validate_path2(Path, Dest, NextLeftBound, NextRightBound, Rest, - PathSize, DataSize, LeftBoundShift); + validate_path(Path, Dest, NextLeftBound, NextRightBound, Rest, + {offset_rebase_support_ruleset, IsRightMostInItsSubTree2, LeftBoundShift}); _ -> false end; -validate_path2(_ID, Dest, LeftBound, RightBound, Path, _PathSize, _DataSize, - _LeftBoundShift) -> - io:format("Layout: E=~B L=~B R=~B, Path: ~p Size: ~B~n", - [Dest, LeftBound, RightBound, Path, byte_size(Path)]), % TODO +validate_path(_, _, _, _, _, {offset_rebase_support_ruleset, _, _}) -> false. %% @doc Get the note (offset) attached to the leaf from a path. 
@@ -460,7 +444,8 @@ test_generate_and_validate_balanced_tree_path() -> {Leaf, StartOffset, EndOffset} = ar_merkle:validate_path(MR, RandomTarget, ?TEST_SIZE, Path), {Leaf, StartOffset, EndOffset} = - ar_merkle:validate_path_strict_borders(MR, RandomTarget, ?TEST_SIZE, Path), + ar_merkle:validate_path(MR, RandomTarget, ?TEST_SIZE, Path, + strict_borders_ruleset), ?assertEqual(RandomTarget, binary:decode_unsigned(Leaf)), ?assert(RandomTarget < EndOffset), ?assert(RandomTarget >= StartOffset) @@ -480,26 +465,28 @@ generate_and_validate_tree_with_rebase_test() -> {Root0, Tree0} = ar_merkle:generate_tree(Tags0), ?assertNotEqual(Root1, Root0), Path0_1 = ar_merkle:generate_path(Root0, 0, Tree0), - {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path2(Root0, 0, - 2 * ?DATA_CHUNK_SIZE, Path0_1), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root0, 0, 2 * ?DATA_CHUNK_SIZE, + Path0_1, offset_rebase_support_ruleset), Path1_1 = ar_merkle:generate_path(Root1, 0, Tree1), ?assertNotEqual(Path0_1, Path1_1), - {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path2(Root1, 0, - 2 * ?DATA_CHUNK_SIZE, Path1_1), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root1, 0, 2 * ?DATA_CHUNK_SIZE, + Path1_1, offset_rebase_support_ruleset), Path0_2 = ar_merkle:generate_path(Root0, ?DATA_CHUNK_SIZE, Tree0), Path1_2 = ar_merkle:generate_path(Root1, ?DATA_CHUNK_SIZE, Tree1), ?assertNotEqual(Path1_2, Path0_2), - {Leaf2, ?DATA_CHUNK_SIZE, Right0_2} = ar_merkle:validate_path2(Root0, ?DATA_CHUNK_SIZE, - 2 * ?DATA_CHUNK_SIZE, Path0_2), + {Leaf2, ?DATA_CHUNK_SIZE, Right0_2} = ar_merkle:validate_path(Root0, ?DATA_CHUNK_SIZE, + 2 * ?DATA_CHUNK_SIZE, Path0_2, offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Right0_2), - {Leaf2, ?DATA_CHUNK_SIZE, Right1_2} = ar_merkle:validate_path2(Root1, ?DATA_CHUNK_SIZE, - 2 * ?DATA_CHUNK_SIZE, Path1_2), + {Leaf2, ?DATA_CHUNK_SIZE, Right1_2} = ar_merkle:validate_path(Root1, ?DATA_CHUNK_SIZE, + 2 * ?DATA_CHUNK_SIZE, Path1_2, 
offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Right1_2), - {Leaf2, ?DATA_CHUNK_SIZE, Right1_2} = ar_merkle:validate_path2(Root1, - 2 * ?DATA_CHUNK_SIZE - 1, 2 * ?DATA_CHUNK_SIZE, Path1_2), - ?assertEqual(false, ar_merkle:validate_path2(Root1, ?DATA_CHUNK_SIZE, - 2 * ?DATA_CHUNK_SIZE, Path1_1)), - ?assertEqual(false, ar_merkle:validate_path2(Root1, 0, 2 * ?DATA_CHUNK_SIZE, Path1_2)), + {Leaf2, ?DATA_CHUNK_SIZE, Right1_2} = ar_merkle:validate_path(Root1, + 2 * ?DATA_CHUNK_SIZE - 1, 2 * ?DATA_CHUNK_SIZE, Path1_2, + offset_rebase_support_ruleset), + ?assertEqual(false, ar_merkle:validate_path(Root1, ?DATA_CHUNK_SIZE, + 2 * ?DATA_CHUNK_SIZE, Path1_1, offset_rebase_support_ruleset)), + ?assertEqual(false, ar_merkle:validate_path(Root1, 0, + 2 * ?DATA_CHUNK_SIZE, Path1_2, offset_rebase_support_ruleset)), %% Root2 %% / \ %% Leaf1 (with offset reset) Leaf2 (with offset reset) @@ -507,17 +494,19 @@ generate_and_validate_tree_with_rebase_test() -> Tags2 = [[{Leaf1, ?DATA_CHUNK_SIZE}], {Leaf2, ?DATA_CHUNK_SIZE * 2}], {Root2, Tree2} = ar_merkle:generate_tree(Tags2), Path2_1 = ar_merkle:generate_path(Root2, 0, Tree2), - {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path2(Root2, 0, - 2 * ?DATA_CHUNK_SIZE, Path2_1), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root2, 0, + 2 * ?DATA_CHUNK_SIZE, Path2_1, offset_rebase_support_ruleset), Path2_2 = ar_merkle:generate_path(Root2, ?DATA_CHUNK_SIZE, Tree2), - {Leaf2, ?DATA_CHUNK_SIZE, Right2_2} = ar_merkle:validate_path2(Root2, ?DATA_CHUNK_SIZE, - 2 * ?DATA_CHUNK_SIZE, Path2_2), + {Leaf2, ?DATA_CHUNK_SIZE, Right2_2} = ar_merkle:validate_path(Root2, + ?DATA_CHUNK_SIZE, 2 * ?DATA_CHUNK_SIZE, Path2_2, offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Right2_2), - {Leaf2, ?DATA_CHUNK_SIZE, Right2_2} = ar_merkle:validate_path2(Root2, - 2 * ?DATA_CHUNK_SIZE - 1, 2 * ?DATA_CHUNK_SIZE, Path2_2), - ?assertEqual(false, ar_merkle:validate_path2(Root2, ?DATA_CHUNK_SIZE, - 2 * ?DATA_CHUNK_SIZE, 
Path2_1)), - ?assertEqual(false, ar_merkle:validate_path2(Root2, 0, 2 * ?DATA_CHUNK_SIZE, Path2_2)), + {Leaf2, ?DATA_CHUNK_SIZE, Right2_2} = ar_merkle:validate_path(Root2, + 2 * ?DATA_CHUNK_SIZE - 1, 2 * ?DATA_CHUNK_SIZE, Path2_2, + offset_rebase_support_ruleset), + ?assertEqual(false, ar_merkle:validate_path(Root2, ?DATA_CHUNK_SIZE, + 2 * ?DATA_CHUNK_SIZE, Path2_1, offset_rebase_support_ruleset)), + ?assertEqual(false, ar_merkle:validate_path(Root2, 0, + 2 * ?DATA_CHUNK_SIZE, Path2_2, offset_rebase_support_ruleset)), %% Root3 %% / \ %% SubTree1 SubTree2 @@ -536,35 +525,35 @@ generate_and_validate_tree_with_rebase_test() -> {Root3, Tree3} = ar_merkle:generate_tree(Tags3), Path3_1 = ar_merkle:generate_path(Root3, 0, Tree3), ?debugMsg("Path 1"), - {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path2(Root3, 0, - 6 * ?DATA_CHUNK_SIZE, Path3_1), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root3, 0, 6 * ?DATA_CHUNK_SIZE, + Path3_1, offset_rebase_support_ruleset), Path3_2 = ar_merkle:generate_path(Root3, ?DATA_CHUNK_SIZE, Tree3), ?debugMsg("Path 2"), - {Leaf2, ?DATA_CHUNK_SIZE, Right3_2} = ar_merkle:validate_path2(Root3, ?DATA_CHUNK_SIZE, - 6 * ?DATA_CHUNK_SIZE, Path3_2), + {Leaf2, ?DATA_CHUNK_SIZE, Right3_2} = ar_merkle:validate_path(Root3, ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path3_2, offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Right3_2), Path3_3 = ar_merkle:generate_path(Root3, ?DATA_CHUNK_SIZE * 2, Tree3), ?debugMsg("Path 3"), - {Leaf3, Left3_3, Right3_3} = ar_merkle:validate_path2(Root3, - 2 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path3_3), + {Leaf3, Left3_3, Right3_3} = ar_merkle:validate_path(Root3, 2 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path3_3, offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Left3_3), ?assertEqual(3 * ?DATA_CHUNK_SIZE, Right3_3), Path3_4 = ar_merkle:generate_path(Root3, ?DATA_CHUNK_SIZE * 3, Tree3), ?debugMsg("Path 4"), - {Leaf4, Left3_4, Right3_4} = 
ar_merkle:validate_path2(Root3, - 3 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path3_4), + {Leaf4, Left3_4, Right3_4} = ar_merkle:validate_path(Root3, 3 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path3_4, offset_rebase_support_ruleset), ?assertEqual(3 * ?DATA_CHUNK_SIZE, Left3_4), ?assertEqual(4 * ?DATA_CHUNK_SIZE, Right3_4), Path3_5 = ar_merkle:generate_path(Root3, ?DATA_CHUNK_SIZE * 4, Tree3), ?debugMsg("Path 5"), - {Leaf5, Left3_5, Right3_5} = ar_merkle:validate_path2(Root3, - 4 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path3_5), + {Leaf5, Left3_5, Right3_5} = ar_merkle:validate_path(Root3, 4 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path3_5, offset_rebase_support_ruleset), ?assertEqual(4 * ?DATA_CHUNK_SIZE, Left3_5), ?assertEqual(5 * ?DATA_CHUNK_SIZE, Right3_5), Path3_6 = ar_merkle:generate_path(Root3, ?DATA_CHUNK_SIZE * 5, Tree3), ?debugMsg("Path 6"), - {Leaf6, Left3_6, Right3_6} = ar_merkle:validate_path2(Root3, - 5 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path3_6), + {Leaf6, Left3_6, Right3_6} = ar_merkle:validate_path(Root3, 5 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path3_6, offset_rebase_support_ruleset), ?assertEqual(5 * ?DATA_CHUNK_SIZE, Left3_6), ?assertEqual(6 * ?DATA_CHUNK_SIZE, Right3_6), %% Root4 @@ -581,37 +570,70 @@ generate_and_validate_tree_with_rebase_test() -> {Root4, Tree4} = ar_merkle:generate_tree(Tags4), Path4_1 = ar_merkle:generate_path(Root4, 0, Tree4), ?debugMsg("Path 1"), - {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path2(Root4, 0, - 6 * ?DATA_CHUNK_SIZE, Path4_1), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root4, 0, 6 * ?DATA_CHUNK_SIZE, + Path4_1, offset_rebase_support_ruleset), Path4_2 = ar_merkle:generate_path(Root4, ?DATA_CHUNK_SIZE, Tree4), ?debugMsg("Path 2"), - {Leaf2, ?DATA_CHUNK_SIZE, Right4_2} = ar_merkle:validate_path2(Root4, ?DATA_CHUNK_SIZE, - 6 * ?DATA_CHUNK_SIZE, Path4_2), + {Leaf2, ?DATA_CHUNK_SIZE, Right4_2} = ar_merkle:validate_path(Root4, ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path4_2, 
offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Right4_2), Path4_3 = ar_merkle:generate_path(Root4, ?DATA_CHUNK_SIZE * 2, Tree4), ?debugMsg("Path 3"), - {Leaf3, Left4_3, Right4_3} = ar_merkle:validate_path2(Root4, - 2 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path4_3), + {Leaf3, Left4_3, Right4_3} = ar_merkle:validate_path(Root4, 2 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path4_3, offset_rebase_support_ruleset), ?assertEqual(2 * ?DATA_CHUNK_SIZE, Left4_3), ?assertEqual(3 * ?DATA_CHUNK_SIZE, Right4_3), Path4_4 = ar_merkle:generate_path(Root4, ?DATA_CHUNK_SIZE * 3, Tree4), ?debugMsg("Path 4"), - {Leaf4, Left4_4, Right4_4} = ar_merkle:validate_path2(Root4, - 3 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path4_4), + {Leaf4, Left4_4, Right4_4} = ar_merkle:validate_path(Root4, 3 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path4_4, offset_rebase_support_ruleset), ?assertEqual(3 * ?DATA_CHUNK_SIZE, Left4_4), ?assertEqual(4 * ?DATA_CHUNK_SIZE, Right4_4), Path4_5 = ar_merkle:generate_path(Root4, ?DATA_CHUNK_SIZE * 4, Tree4), ?debugMsg("Path 5"), - {Leaf5, Left4_5, Right4_5} = ar_merkle:validate_path2(Root4, - 4 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path4_5), + {Leaf5, Left4_5, Right4_5} = ar_merkle:validate_path(Root4, 4 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path4_5, offset_rebase_support_ruleset), ?assertEqual(4 * ?DATA_CHUNK_SIZE, Left4_5), ?assertEqual(5 * ?DATA_CHUNK_SIZE, Right4_5), Path4_6 = ar_merkle:generate_path(Root4, ?DATA_CHUNK_SIZE * 5, Tree4), ?debugMsg("Path 6"), - {Leaf6, Left4_6, Right4_6} = ar_merkle:validate_path2(Root4, - 5 * ?DATA_CHUNK_SIZE, 6 * ?DATA_CHUNK_SIZE, Path4_6), + {Leaf6, Left4_6, Right4_6} = ar_merkle:validate_path(Root4, 5 * ?DATA_CHUNK_SIZE, + 6 * ?DATA_CHUNK_SIZE, Path4_6, offset_rebase_support_ruleset), ?assertEqual(5 * ?DATA_CHUNK_SIZE, Left4_6), - ?assertEqual(6 * ?DATA_CHUNK_SIZE, Right4_6). 
+ ?assertEqual(6 * ?DATA_CHUNK_SIZE, Right4_6), + %% Root5 + %% / \ + %% Leaf1 Leaf2 (with offset reset, < 256 KiB) + ?debugMsg("Tree 5"), + Tags5 = [{Leaf1, ?DATA_CHUNK_SIZE}, [{Leaf2, 100}]], + {Root5, Tree5} = ar_merkle:generate_tree(Tags5), + Path5_1 = ar_merkle:generate_path(Root5, 0, Tree5), + {Leaf1, 0, ?DATA_CHUNK_SIZE} = ar_merkle:validate_path(Root5, 0, + ?DATA_CHUNK_SIZE + 100, Path5_1, offset_rebase_support_ruleset), + Path5_2 = ar_merkle:generate_path(Root5, ?DATA_CHUNK_SIZE, Tree5), + {Leaf2, ?DATA_CHUNK_SIZE, Right5_2} = ar_merkle:validate_path(Root5, + ?DATA_CHUNK_SIZE, ?DATA_CHUNK_SIZE + 100, Path5_2, offset_rebase_support_ruleset), + ?assertEqual(?DATA_CHUNK_SIZE + 100, Right5_2), + %% Root6 + %% / \ + %% SubTree1 Leaf3 + %% / \ + %% Leaf1 (< 256 KiB) Leaf2 (< 256 KiB, spans two buckets) + ?debugMsg("Tree 6"), + Tags6 = [[{Leaf1, 131070}, {Leaf2, 393213}], {Leaf3, 655355}], + {Root6, Tree6} = ar_merkle:generate_tree(Tags6), + Path6_1 = ar_merkle:generate_path(Root6, 0, Tree6), + {Leaf1, 0, 131070} = ar_merkle:validate_path(Root6, 0, + 1000000, % an arbitrary bound > 655355 + Path6_1, offset_rebase_support_ruleset), + Path6_2 = ar_merkle:generate_path(Root6, 131070, Tree6), + {Leaf2, 131070, Right6_2} = ar_merkle:validate_path(Root6, 131070 + 5, + 655355, Path6_2, offset_rebase_support_ruleset), + ?assertEqual(393213, Right6_2), + Path6_3 = ar_merkle:generate_path(Root6, 393213 + 1, Tree6), + {Leaf3, 393213, Right6_3} = ar_merkle:validate_path(Root6, 393213 + 2, 655355, Path6_3, + offset_rebase_support_ruleset), + ?assertEqual(655355, Right6_3). 
generate_and_validate_uneven_tree_path_test() -> Tags = make_tags_cumulative([{<<N:256>>, 1} @@ -622,8 +644,8 @@ generate_and_validate_uneven_tree_path_test() -> {Leaf, StartOffset, EndOffset} = ar_merkle:validate_path(MR, ?UNEVEN_TEST_TARGET, ?UNEVEN_TEST_SIZE, Path), {Leaf, StartOffset, EndOffset} = - ar_merkle:validate_path_strict_borders(MR, ?UNEVEN_TEST_TARGET, ?UNEVEN_TEST_SIZE, - Path), + ar_merkle:validate_path(MR, ?UNEVEN_TEST_TARGET, ?UNEVEN_TEST_SIZE, + Path, strict_borders_ruleset), ?assertEqual(?UNEVEN_TEST_TARGET, binary:decode_unsigned(Leaf)), ?assert(?UNEVEN_TEST_TARGET < EndOffset), ?assert(?UNEVEN_TEST_TARGET >= StartOffset). diff --git a/apps/arweave/src/ar_poa.erl b/apps/arweave/src/ar_poa.erl index b403119bb..a62d68487 100644 --- a/apps/arweave/src/ar_poa.erl +++ b/apps/arweave/src/ar_poa.erl @@ -2,15 +2,46 @@ %%% for a chunk of data received from the network. -module(ar_poa). --export([validate_pre_fork_2_5/4, validate/1, get_padded_offset/2]). +-export([get_data_path_validation_ruleset/2, get_data_path_validation_ruleset/3, + validate_pre_fork_2_5/4, validate/1, get_padded_offset/2]). -include_lib("arweave/include/ar.hrl"). +-include_lib("arweave/include/ar_consensus.hrl"). -include_lib("arweave/include/ar_pricing.hrl"). %%%=================================================================== %%% Public interface. %%%=================================================================== +%% @doc Return the merkle proof validation ruleset code depending on the block start +%% offset, the threshold where the offset rebases were allowed (and the validation +%% changed in some other ways on top of that). The threshold where the specific +%% requirements were imposed on data splits to make each chunk belong to its own +%% 256 KiB bucket is set to ?STRICT_DATA_SPLIT_THRESHOLD. The code is then passed to +%% ar_merkle:validate_path/5. 
+get_data_path_validation_ruleset(BlockStartOffset, MerkleRebaseSupportThreshold) -> + get_data_path_validation_ruleset(BlockStartOffset, MerkleRebaseSupportThreshold, + ?STRICT_DATA_SPLIT_THRESHOLD). + +%% @doc Return the merkle proof validation ruleset code depending on the block start +%% offset, the threshold where the offset rebases were allowed (and the validation +%% changed in some other ways on top of that), and the threshold where the specific +%% requirements were imposed on data splits to make each chunk belong to its own +%% 256 KiB bucket. The code is then passed to ar_merkle:validate_path/5. +get_data_path_validation_ruleset(BlockStartOffset, MerkleRebaseSupportThreshold, + StrictDataSplitThreshold) -> + case BlockStartOffset >= MerkleRebaseSupportThreshold of + true -> + offset_rebase_support_ruleset; + false -> + case BlockStartOffset >= StrictDataSplitThreshold of + true -> + strict_data_split_ruleset; + false -> + strict_borders_ruleset + end + end. + %% @doc Validate a proof of access. 
validate(Args) -> {BlockStartOffset, RecallOffset, TXRoot, BlockSize, SPoA, StrictDataSplitThreshold, @@ -25,18 +56,8 @@ validate(Args) -> false -> RecallOffset - BlockStartOffset end, - ValidateDataPathFun = - case BlockStartOffset >= MerkleRebaseSupportThreshold of - true -> - fun ar_merkle:validate_path2/4; - false -> - case BlockStartOffset >= StrictDataSplitThreshold of - true -> - fun ar_merkle:validate_path_strict_data_split/4; - false -> - fun ar_merkle:validate_path_strict_borders/4 - end - end, + ValidateDataPathRuleset = get_data_path_validation_ruleset(BlockStartOffset, + MerkleRebaseSupportThreshold, StrictDataSplitThreshold), case ar_merkle:validate_path(TXRoot, RecallBucketOffset, BlockSize, TXPath) of false -> false; @@ -44,7 +65,8 @@ validate(Args) -> TXSize = TXEndOffset - TXStartOffset, RecallChunkOffset = RecallBucketOffset - TXStartOffset, DataPath = SPoA#poa.data_path, - case ValidateDataPathFun(DataRoot, RecallChunkOffset, TXSize, DataPath) of + case ar_merkle:validate_path(DataRoot, RecallChunkOffset, TXSize, DataPath, + ValidateDataPathRuleset) of false -> false; {ChunkID, ChunkStartOffset, ChunkEndOffset} -> diff --git a/apps/arweave/src/ar_tx.erl b/apps/arweave/src/ar_tx.erl index c2ed20301..1062267df 100644 --- a/apps/arweave/src/ar_tx.erl +++ b/apps/arweave/src/ar_tx.erl @@ -912,11 +912,11 @@ test_generate_chunk_tree_and_validate_path(Data, ChallengeLocation) -> {PathChunkID, StartOffset, EndOffset} = ar_merkle:validate_path(DataRoot, ChallengeLocation, byte_size(Data), DataPath), {PathChunkID, StartOffset, EndOffset} = - ar_merkle:validate_path_strict_data_split(DataRoot, ChallengeLocation, byte_size(Data), - DataPath), + ar_merkle:validate_path(DataRoot, ChallengeLocation, byte_size(Data), + DataPath, strict_data_split_ruleset), {PathChunkID, StartOffset, EndOffset} = - ar_merkle:validate_path_strict_borders(DataRoot, ChallengeLocation, byte_size(Data), - DataPath), + ar_merkle:validate_path(DataRoot, ChallengeLocation,
byte_size(Data), + DataPath, strict_borders_ruleset), ?assertEqual(RealChunkID, PathChunkID), ?assert(ChallengeLocation >= StartOffset), ?assert(ChallengeLocation < EndOffset). @@ -1033,5 +1033,3 @@ test_get_tx_fee(DataSize, Height, Addr, ExpectedFee) -> ?assertEqual(ExpectedFee, ar_tx:get_tx_fee({DataSize, Rate, PricePerGiBMinute, KryderPlusRateMultiplier, Addr, Timestamp, Accounts, Height})). - - \ No newline at end of file