Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
91ec519
chore: primitive alias owner helpers
charmful0x Mar 5, 2026
be56284
feat: owner ex/inclusion filters
charmful0x Mar 5, 2026
9acee0d
feat: parse_exclude_tags/2 util
charmful0x Mar 6, 2026
a3efcb9
feat: l1 filters handler & minimal patches
charmful0x Mar 6, 2026
08fdeda
feat: process_l1_candidates/3 & wiring with Id path
charmful0x Mar 6, 2026
928e13b
chore: clearer L1 filters error logging
charmful0x Mar 6, 2026
8c718e2
feat: bundle bytestream download + in-memory processing
charmful0x Mar 6, 2026
9b5163d
perf: safe_max depth default & L1 bundle safe max size
charmful0x Mar 6, 2026
8236c0a
fix: skip normalizing not_found tag
charmful0x Mar 6, 2026
48c8320
feat: support comma separated aliases
charmful0x Mar 6, 2026
c69cef6
feat: depth recursion cap setter/getter
charmful0x Mar 6, 2026
e54c2c3
docs: add fns doc
charmful0x Mar 6, 2026
88b96c8
feat: &include-tag filter
charmful0x Mar 6, 2026
f56bcd5
feat: load l1 tx offset
charmful0x Mar 6, 2026
54e9366
chore: bump default MEMORY_SAFE_CAP
charmful0x Mar 7, 2026
7167b01
docs: document ensure_l1_tx_offset
charmful0x Mar 7, 2026
6cedcd1
chore: emit event on network fetch for missing l1 store offsets
charmful0x Mar 7, 2026
73476a7
fix: address comments
charmful0x Mar 8, 2026
68a3a5c
feat: block N depth indexing
charmful0x Mar 10, 2026
bfbdd00
test: add tests
JamesPiechota Mar 10, 2026
8ecc72c
chore: some minor renaming
JamesPiechota Mar 11, 2026
1d063ad
refactor: nested case -> maybe and some naming changes
JamesPiechota Mar 11, 2026
c495cef
chore: add some more logging to the L1 TX indexer
JamesPiechota Mar 11, 2026
86d2a2b
chore: have indexer return the number of L1 TX items indexed
JamesPiechota Mar 11, 2026
1c2ab10
fix: clean up rebase/merge conflicts
JamesPiechota Mar 12, 2026
0a879f2
fix: update copycat response to work better with reindexing script
JamesPiechota Mar 12, 2026
d02aab1
fix: remove blocking io from hb_event overload path
JamesPiechota Mar 13, 2026
d8bc29b
chore: minor update to one of the test uploader scripts
JamesPiechota Mar 13, 2026
25d5799
fix: add more metrics to track L1 TXID indexing
JamesPiechota Mar 13, 2026
7f03c65
fix: remove some logging from copycat
JamesPiechota Mar 13, 2026
5f05c94
fix: xxx_debug -> debug_xxx for ?event topics so they don't get added…
JamesPiechota Mar 13, 2026
c8fc56f
fix: event topics must be atoms
JamesPiechota Mar 13, 2026
7e849f1
fix: allow binary event names
JamesPiechota Mar 13, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 11 additions & 11 deletions src/dev_arweave.erl
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ post_tx(_Base, Request, Opts, <<"tx@1.0">>) ->
CacheRes = hb_cache:write(Request, Opts),
case CacheRes of
{ok, _} ->
?event(arweave_debug, {tx_cached, {msg, Request}, {status, ok}});
?event(debug_arweave, {tx_cached, {msg, Request}, {status, ok}});
_ ->
?event(error, {tx_failed_to_cache, {msg, Request}, CacheRes})
end;
Expand Down Expand Up @@ -383,7 +383,7 @@ get_chunk_range(_Base, Request, Opts) ->
%% cannot span the strict data split threshold, so mixed ranges are rejected.
fetch_chunk_range(Offset, Length, Opts) ->
EndOffset = Offset + Length - 1,
?event(arweave_debug, {fetch_chunk_range,
?event(debug_arweave, {fetch_chunk_range,
{offset, Offset},
{end_offset, EndOffset},
{size, Length}}),
Expand Down Expand Up @@ -421,7 +421,7 @@ fetch_post_threshold(Offset, EndOffset, Opts) ->
true ->
ExtraOffset = min(
lists:last(Offsets) + ?DATA_CHUNK_SIZE, EndOffset),
?event(arweave_debug, {fetching_extra_chunk,
?event(debug_arweave, {fetching_extra_chunk,
{binary_size, BinarySize},
{expected_length, ExpectedLength},
{extra_offset, ExtraOffset}}),
Expand Down Expand Up @@ -458,7 +458,7 @@ fill_gaps(ChunkInfos, Offset, EndOffset, Opts) ->
% be needed. We have yet to find an L1 TX that is chunked in such
% a way as to create gaps when using our naive 256KiB chunking.
GapOffsets = [Start || {Start, _End} <- Gaps],
?event(arweave_debug,
?event(debug_arweave,
{fill_gaps,
{offset, Offset},
{end_offset, EndOffset},
Expand All @@ -477,7 +477,7 @@ fill_gaps(ChunkInfos, Offset, EndOffset, Opts) ->
{gap_offsets, GapOffsets}}),
case fetch_and_collect(GapOffsets, Opts) of
{ok, NewInfos} ->
?event(arweave_debug, {fill_gaps, NewInfos}),
?event(debug_arweave, {fill_gaps, NewInfos}),
fill_gaps(
Sorted ++ NewInfos,
Offset, EndOffset, Opts
Expand All @@ -504,7 +504,7 @@ generate_offsets(Start, End, Step) ->

generate_offsets(Current, End, _Step, Acc) when Current > End ->
Offsets = lists:reverse(Acc),
?event(arweave_debug, {fetch_chunk_offsets, {offsets, Offsets}}),
?event(debug_arweave, {fetch_chunk_offsets, {offsets, Offsets}}),
Offsets;
generate_offsets(Current, End, Step, Acc) ->
generate_offsets(Current + Step, End, Step, [Current | Acc]).
Expand All @@ -520,7 +520,7 @@ collect_chunks([{ok, JSON} | Rest], Acc) ->
Chunk = hb_util:decode(maps:get(<<"chunk">>, JSON)),
AbsEnd = hb_util:int(maps:get(<<"absolute_end_offset">>, JSON)),
AbsStart = AbsEnd - byte_size(Chunk) + 1,
?event(arweave_debug,
?event(debug_arweave,
{collect_chunks,
{abs_start, AbsStart},
{abs_end, AbsEnd},
Expand Down Expand Up @@ -585,7 +585,7 @@ assemble_chunks(ChunkInfos, Offset) ->
% The first chunk may start before the requested offset;
% trim the leading bytes to start exactly at Offset.
Skip = Offset - ChunkStart,
?event(arweave_debug, {assemble_chunks,
?event(debug_arweave, {assemble_chunks,
{skip, Skip},
{chunk_start, ChunkStart},
{offset, Offset},
Expand All @@ -594,7 +594,7 @@ assemble_chunks(ChunkInfos, Offset) ->
}),
binary:part(Data, Skip, byte_size(Data) - Skip);
false ->
?event(arweave_debug, {assemble_chunks,
?event(debug_arweave, {assemble_chunks,
{chunk_start, ChunkStart},
{offset, Offset},
{byte_size, byte_size(Data)}
Expand Down Expand Up @@ -730,7 +730,7 @@ request(Method, Path, Opts) ->
request(Method, Path, Extra, Opts) ->
request(Method, Path, Extra, [], Opts).
request(Method, Path, Extra, LogExtra, Opts) ->
?event(arweave_debug, {request,
?event(debug_arweave, {request,
{method, Method}, {path, {explicit, Path}}, {log_extra, LogExtra}}),
Res =
hb_http:request(
Expand Down Expand Up @@ -793,7 +793,7 @@ to_message(Path = <<"/tx">>, <<"POST">>, {ok, Response}, LogExtra, _Opts) ->
to_message(Path = <<"/tx/", TXID/binary>>, <<"GET">>, {ok, #{ <<"body">> := Body }}, LogExtra, Opts) ->
event_request(Path, <<"GET">>, 200, LogExtra),
TXHeader = ar_tx:json_struct_to_tx(hb_json:decode(Body)),
?event(arweave_debug,
?event(debug_arweave,
{arweave_tx_response,
{path, {explicit, Path}},
{raw_body, {explicit, Body}},
Expand Down
2 changes: 1 addition & 1 deletion src/dev_bundler.erl
Original file line number Diff line number Diff line change
Expand Up @@ -304,7 +304,7 @@ handle_task_complete(WorkerPID, Task, Result, State = #state{
bundles = Bundles
}) ->
#task{bundle_id = BundleID} = Task,
?event(bundler_debug, dev_bundler_task:log_task(task_complete, Task, [])),
?event(debug_bundler, dev_bundler_task:log_task(task_complete, Task, [])),
State1 = State#state{
workers = maps:put(WorkerPID, idle, Workers)
},
Expand Down
8 changes: 4 additions & 4 deletions src/dev_bundler_cache.erl
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ complete_tx(TX, Opts) ->
%% @doc Set the status of a bundle TX.
set_tx_status(TX, Status, Opts) ->
Path = tx_path(TX, Opts),
?event(bundler_debug, {set_tx_status, {path, Path}, {status, Status}}),
?event(debug_bundler, {set_tx_status, {path, Path}, {status, Status}}),
write_pseudopath(Path, Status, Opts).

%% @doc Get the status of a bundle TX.
Expand Down Expand Up @@ -134,7 +134,7 @@ load_bundle_states(Opts) ->
<<"complete">> -> false; % Skip completed bundles
Status ->
?event(
bundler_debug,
debug_bundler,
{loaded_tx_state,
{id, {string, TXID}},
{status, Status}
Expand All @@ -148,10 +148,10 @@ load_bundle_states(Opts) ->

%% @doc Load a TX from cache by its ID.
load_tx(TXID, Opts) ->
?event(bundler_debug, {load_tx, {tx_id, {explicit, TXID}}}),
?event(debug_bundler, {load_tx, {tx_id, {explicit, TXID}}}),
case hb_cache:read(TXID, Opts) of
{ok, TX} ->
?event(bundler_debug, {loaded_tx, {tx_id, {explicit, TXID}}}),
?event(debug_bundler, {loaded_tx, {tx_id, {explicit, TXID}}}),
hb_cache:ensure_all_loaded(TX, Opts);
_ ->
?event(error, {failed_to_load_tx, {tx_id, {explicit, TXID}}}),
Expand Down
2 changes: 1 addition & 1 deletion src/dev_bundler_recovery.erl
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ recover_bundle(ServerPID, TXID, Status, Opts) ->
Opts,
fun(ItemID, _Item) ->
?event(
bundler_debug,
debug_bundler,
{loaded_bundle_item,
{tx_id, {explicit, TXID}},
{item_id, {explicit, ItemID}}
Expand Down
6 changes: 3 additions & 3 deletions src/dev_bundler_task.erl
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ worker_loop() ->
%% @doc Execute a specific task.
execute_task(#task{type = post_tx, data = Items, opts = Opts} = Task) ->
try
?event(bundler_debug, log_task(executing_task, Task, [])),
?event(debug_bundler, log_task(executing_task, Task, [])),
% Get price and anchor
{ok, TX} = dev_codec_tx:to(lists:reverse(Items), #{}, #{}),
DataSize = TX#tx.data_size,
Expand Down Expand Up @@ -83,7 +83,7 @@ execute_task(#task{type = post_tx, data = Items, opts = Opts} = Task) ->

execute_task(#task{type = build_proofs, data = CommittedTX, opts = Opts} = Task) ->
try
?event(bundler_debug, log_task(executing_task, Task, [])),
?event(debug_bundler, log_task(executing_task, Task, [])),
% Calculate chunks and proofs
TX = hb_message:convert(
CommittedTX, <<"tx@1.0">>, <<"structured@1.0">>, Opts),
Expand Down Expand Up @@ -139,7 +139,7 @@ execute_task(#task{type = build_proofs, data = CommittedTX, opts = Opts} = Task)
execute_task(#task{type = post_proof, data = Proof, opts = Opts} = Task) ->
#{chunk := Chunk, data_path := DataPath, offset := Offset,
data_size := DataSize, data_root := DataRoot} = Proof,
?event(bundler_debug, log_task(executing_task, Task, [])),
?event(debug_bundler, log_task(executing_task, Task, [])),
Request = #{
<<"chunk">> => hb_util:encode(Chunk),
<<"data_path">> => hb_util:encode(DataPath),
Expand Down
Loading