WIP
JamesPiechota committed Feb 26, 2025
1 parent 095e7fa commit 1438cee
Showing 5 changed files with 353 additions and 272 deletions.
11 changes: 11 additions & 0 deletions apps/arweave/src/ar_chunk_storage.erl
@@ -658,6 +658,12 @@ record_chunk(
sync_record_id(Packing), StoreID) of
ok ->
ChunkFileStart = get_chunk_file_start(PaddedEndOffset),
?LOG_DEBUG([{event, record_chunk},
{padded_end_offset, PaddedEndOffset},
{packing, ar_serialize:encode_packing(Packing, true)},
{store_id, StoreID},
{chunk_file_start, ChunkFileStart},
{filepath, Filepath}]),
ets:insert(chunk_storage_file_index,
{{ChunkFileStart, StoreID}, Filepath}),
{ok, maps:put(ChunkFileStart, Filepath, FileIndex), Packing};
@@ -724,6 +730,11 @@ get_handle_by_filepath(Filepath) ->
end.

write_chunk2(PaddedOffset, ChunkOffset, Chunk, Filepath, F, Position) ->
?LOG_DEBUG([{event, write_chunk2},
{padded_offset, PaddedOffset},
{chunk_offset, ChunkOffset},
{filepath, Filepath},
{position, Position}]),
ChunkOffsetBinary =
case ChunkOffset of
0 ->
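
These ?LOG_DEBUG calls appear to come from OTP's logger macros (kernel's logger.hrl), so they emit nothing unless debug level is enabled for the module. A minimal sketch, assuming the default OTP logger, of how one might surface the new output at runtime for just the instrumented modules; enable_debug/0 and the module debug_toggle are hypothetical helpers, not part of this commit:

-module(debug_toggle).
-export([enable_debug/0]).

%% Hypothetical helper: raise the log level to debug for only the modules
%% this commit instruments. A module-level override takes precedence over
%% the primary level, so the rest of the node stays quiet.
enable_debug() ->
    logger:set_module_level(
        [ar_chunk_storage, ar_data_sync, ar_entropy_storage, ar_mining_server],
        debug).
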
27 changes: 26 additions & 1 deletion apps/arweave/src/ar_data_sync.erl
@@ -1826,7 +1826,7 @@ read_chunk_with_metadata(
ar_serialize:encode_packing(StoredPacking, true)},
{modules_covering_seek_offset, ModuleIDs},
{chunk_data_key, ar_util:encode(ChunkDataKey)},
{read_fun, ReadFun}]),
{read_fun, ReadChunk}]),
invalidate_bad_data_record({AbsoluteOffset, ChunkSize, StoreID,
failed_to_read_chunk_data_path}),
{error, chunk_not_found};
@@ -2917,6 +2917,8 @@ update_chunks_index(Args, State) ->
AbsoluteChunkOffset = element(1, Args),
case ar_tx_blacklist:is_byte_blacklisted(AbsoluteChunkOffset) of
true ->
?LOG_DEBUG([{event, update_chunks_index_blacklisted},
{absolute_chunk_offset, AbsoluteChunkOffset}]),
ok;
false ->
update_chunks_index2(Args, State)
@@ -2933,11 +2935,34 @@ update_chunks_index2(Args, State) ->
PaddedOffset = ar_block:get_chunk_padded_offset(AbsoluteOffset),
case ar_sync_record:add(PaddedOffset, StartOffset, Packing, ar_data_sync, StoreID) of
ok ->
?LOG_DEBUG([{event, update_chunks_index2_ok},
{absolute_offset, AbsoluteOffset},
{padded_offset, PaddedOffset},
{start_offset, StartOffset},
{offset, Offset},
{packing, ar_serialize:encode_packing(Packing, true)},
{chunk_size, ChunkSize},
{store_id, StoreID}]),
ok;
{error, Reason} ->
?LOG_ERROR([{event, update_chunks_index2_error},
{absolute_offset, AbsoluteOffset},
{padded_offset, PaddedOffset},
{start_offset, StartOffset},
{offset, Offset},
{chunk_size, ChunkSize},
{store_id, StoreID},
{reason, Reason}]),
{error, Reason}
end;
{error, Reason} ->
?LOG_ERROR([{event, update_chunks_index2_error},
{absolute_offset, AbsoluteOffset},
{offset, Offset},
{chunk_size, ChunkSize},
{store_id, StoreID},
{reason, Reason}]),
{error, Reason}
end.

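
The ok and error branches of update_chunks_index2/2 now repeat largely the same field list three times. A sketch of one possible consolidation; index_log_fields/4 is hypothetical and not in the codebase:

%% Hypothetical helper collecting the fields shared by the three new
%% log statements in update_chunks_index2/2.
index_log_fields(AbsoluteOffset, Offset, ChunkSize, StoreID) ->
    [{absolute_offset, AbsoluteOffset},
     {offset, Offset},
     {chunk_size, ChunkSize},
     {store_id, StoreID}].

Each branch would then log, for example, ?LOG_ERROR([{event, update_chunks_index2_error}, {reason, Reason} | index_log_fields(AbsoluteOffset, Offset, ChunkSize, StoreID)]), keeping the event and reason local while the shared fields live in one place.
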
30 changes: 30 additions & 0 deletions apps/arweave/src/ar_entropy_storage.erl
@@ -239,17 +239,38 @@ record_chunk(
{error, Reason};
_ ->
PackedChunk = ar_packing_server:encipher_replica_2_9_chunk(Chunk, Entropy),
?LOG_DEBUG([{event, enciphered_chunk1},
{padded_end_offset, PaddedEndOffset},
{packing, ar_serialize:encode_packing(Packing, true)},
{store_id, StoreID},
{file_index, FileIndex},
{chunk, binary:part(Chunk, 0, 10)},
{entropy, binary:part(Entropy, 0, 10)},
{packed_chunk, binary:part(PackedChunk, 0, 10)}]),
ar_chunk_storage:record_chunk(
PaddedEndOffset, PackedChunk, Packing, StoreID,
StoreIDLabel, PackingLabel, FileIndex)
end;
no_entropy_yet ->
?LOG_DEBUG([{event, no_entropy_yet},
{padded_end_offset, PaddedEndOffset},
{packing, ar_serialize:encode_packing(unpacked_padded, true)},
{store_id, StoreID},
{file_index, FileIndex}]),
ar_chunk_storage:record_chunk(
PaddedEndOffset, Chunk, unpacked_padded, StoreID,
StoreIDLabel, PackingLabel, FileIndex);
{_EndOffset, Entropy} ->
Packing = {replica_2_9, RewardAddr},
PackedChunk = ar_packing_server:encipher_replica_2_9_chunk(Chunk, Entropy),
?LOG_DEBUG([{event, enciphered_chunk2},
{padded_end_offset, PaddedEndOffset},
{packing, ar_serialize:encode_packing(Packing, true)},
{store_id, StoreID},
{file_index, FileIndex},
{chunk, binary:part(Chunk, 0, 10)},
{entropy, binary:part(Entropy, 0, 10)},
{packed_chunk, binary:part(PackedChunk, 0, 10)}]),
ar_chunk_storage:record_chunk(
PaddedEndOffset, PackedChunk, Packing, StoreID,
StoreIDLabel, PackingLabel, FileIndex)
@@ -328,6 +349,15 @@ record_entropy(ChunkEntropy, BucketEndOffset, StoreID, RewardAddr) ->
{error, _} = Error ->
Error;
{_, UnpackedChunk} ->
PackedChunk = ar_packing_server:encipher_replica_2_9_chunk(UnpackedChunk, ChunkEntropy),
?LOG_DEBUG([{event, enciphered_chunk3},
{padded_end_offset, EndOffset},
{start_offset, StartOffset},
{store_id, StoreID},
{chunk_file_start, ChunkFileStart},
{filepath, Filepath},
{chunk, binary:part(UnpackedChunk, 0, 10)},
{entropy, binary:part(ChunkEntropy, 0, 10)},
{packed_chunk, binary:part(PackedChunk, 0, 10)}]),
ar_sync_record:delete(EndOffset, StartOffset, ar_data_sync, StoreID),
PackedChunk
end;
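
The entropy-path logs truncate binaries with binary:part(Bin, 0, 10), which raises badarg whenever the binary is shorter than 10 bytes. Chunks and entropy here are normally far larger, but a clamped variant is cheap insurance; bin_prefix/1 below is a sketch, not part of the commit:

%% Hypothetical guard around binary:part/3: clamp the requested length so
%% that debug logging never crashes on a short or empty binary.
bin_prefix(Bin) when is_binary(Bin) ->
    binary:part(Bin, 0, min(10, byte_size(Bin))).

The log fields would then read {chunk, bin_prefix(Chunk)}, {entropy, bin_prefix(Entropy)}, and so on.
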
15 changes: 15 additions & 0 deletions apps/arweave/src/ar_mining_server.erl
@@ -790,6 +790,10 @@ prepare_solution(poa1, Candidate, Solution) ->
error
end;
_ ->
?LOG_DEBUG([{event, prepare_solution_poa1_chunk1_not_set},
{recall_byte, RecallByte1},
{error, Error},
{chunk, binary:part(Chunk1, 0, 10)}]),
%% If we are a coordinated miner and not an exit node - the exit
%% node will fetch the proofs.
may_be_leave_it_to_exit_peer(
@@ -807,6 +811,10 @@ prepare_solution(poa2, Candidate, Solution) ->
{ok, PoA2} ->
prepare_solution(poa1, Candidate, Solution#mining_solution{ poa2 = PoA2 });
{error, Error} ->
?LOG_DEBUG([{event, prepare_solution_poa2_chunk2_not_set},
{recall_byte, RecallByte2},
{error, Error},
{chunk, binary:part(Chunk2, 0, 10)}]),
Modules = ar_storage_module:get_all(RecallByte2 + 1),
ModuleIDs = [ar_storage_module:id(Module) || Module <- Modules],
LogData = [{recall_byte2, RecallByte2}, {modules_covering_recall_byte, ModuleIDs}],
@@ -1101,6 +1109,10 @@ read_poa(RecallByte, ChunkOrSubChunk, Packing, Nonce) ->
ChunkOrSubChunk, PoA, Packing, PoAReply, Nonce}),
{error, chunk_mismatch};
{_ChunkOrSubChunk, Error, _Packing} ->
?LOG_DEBUG([{event, read_poa_reply},
{recall_byte, RecallByte},
{poa_reply, Error},
{packing, ar_serialize:encode_packing(Packing, true)}]),
Error
end.

@@ -1160,6 +1172,9 @@ read_unpacked_chunk(RecallByte, Proof) ->
unpacked_chunk = ar_packing_server:pad_chunk(UnpackedChunk),
tx_path = TXPath, data_path = DataPath }};
Error ->
?LOG_DEBUG([{event, read_unpacked_chunk_error},
{recall_byte, RecallByte},
{error, Error}]),
Error
end.

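
A note on cost: OTP's logger macros evaluate their report argument only after a logger:allow/2 level check, so the binary:part/3 and ar_serialize:encode_packing/2 calls in these debug statements cost nothing while debug logging is off. A minimal self-contained illustration of that behavior, assuming the standard kernel logger.hrl macros; log_cost_demo is a hypothetical module, not part of the commit:

-module(log_cost_demo).
-include_lib("kernel/include/logger.hrl").
-export([log_chunk/1]).

%% The report list below, including the binary:part/3 call, is only
%% evaluated when debug level is allowed for this module: the macro
%% expands to a logger:allow/2 check wrapped around the actual log call.
log_chunk(Chunk) when is_binary(Chunk), byte_size(Chunk) >= 10 ->
    ?LOG_DEBUG([{event, log_cost_demo}, {chunk, binary:part(Chunk, 0, 10)}]).
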
