lint: enforce basic formatting rules from elvis
hlolli committed Dec 8, 2023
1 parent e58e966 commit fd93acc
Showing 68 changed files with 700 additions and 752 deletions.
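Elvis reads its rules from an elvis.config term file in the repository root. That file is not part of this diff, so the following is only a sketch of the kind of configuration that would produce the changes below — the rule names are real elvis_core rules, but the exact selection and options here are assumed:

    [{elvis, [
        {config, [
            #{dirs => ["apps/arweave/src"],
              filter => "*.erl",
              rules => [
                  %% require spaces around , and | (e.g. [ValidPeer | Ps])
                  {elvis_style, operator_spaces,
                      #{rules => [{right, ","}, {right, "|"}, {left, "|"}]}},
                  %% forbid trailing whitespace at line ends
                  {elvis_text_style, no_trailing_whitespace}
              ]}
        ]}
    ]}].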
29 changes: 14 additions & 15 deletions apps/arweave/src/ar.erl
@@ -42,7 +42,7 @@ show_help() ->
fun({Opt, Desc}) ->
io:format("\t~s~s~n",
[
- string:pad(Opt, 40, trailing, $ ),
+ string:pad(Opt, 40, trailing, $\s),
Desc
]
)
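Note on the change above: `$ ` and `$\s` are the same character literal — the space, code point 32 — but the escaped form cannot be silently broken by editors or lint rules that trim trailing whitespace. In a shell:

    1> $\s.
    32
    2> $\s =:= 32.
    true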
@@ -303,7 +303,7 @@ parse_cli_args(["mine" | Rest], C) ->
parse_cli_args(["peer", Peer | Rest], C = #config{ peers = Ps }) ->
case ar_util:safe_parse_peer(Peer) of
{ok, ValidPeer} ->
- parse_cli_args(Rest, C#config{ peers = [ValidPeer|Ps] });
+ parse_cli_args(Rest, C#config{ peers = [ValidPeer | Ps] });
{error, _} ->
io:format("Peer ~p is invalid.~n", [Peer]),
parse_cli_args(Rest, C)
@@ -328,16 +328,16 @@ parse_cli_args(["local_peer", Peer | Rest], C = #config{ local_peers = Peers })
parse_cli_args(["sync_from_local_peers_only" | Rest], C) ->
parse_cli_args(Rest, C#config{ sync_from_local_peers_only = true });
parse_cli_args(["transaction_blacklist", File | Rest],
- C = #config{ transaction_blacklist_files = Files } ) ->
+ C = #config{transaction_blacklist_files = Files}) ->
parse_cli_args(Rest, C#config{ transaction_blacklist_files = [File | Files] });
parse_cli_args(["transaction_blacklist_url", URL | Rest],
- C = #config{ transaction_blacklist_urls = URLs} ) ->
+ C = #config{transaction_blacklist_urls = URLs}) ->
parse_cli_args(Rest, C#config{ transaction_blacklist_urls = [URL | URLs] });
parse_cli_args(["transaction_whitelist", File | Rest],
- C = #config{ transaction_whitelist_files = Files } ) ->
+ C = #config{transaction_whitelist_files = Files}) ->
parse_cli_args(Rest, C#config{ transaction_whitelist_files = [File | Files] });
parse_cli_args(["transaction_whitelist_url", URL | Rest],
- C = #config{ transaction_whitelist_urls = URLs} ) ->
+ C = #config{transaction_whitelist_urls = URLs}) ->
parse_cli_args(Rest, C#config{ transaction_whitelist_urls = [URL | URLs] });
parse_cli_args(["port", Port | Rest], C) ->
parse_cli_args(Rest, C#config{ port = list_to_integer(Port) });
@@ -415,7 +415,7 @@ parse_cli_args(["start_from_block", H | Rest], C) ->
end;
parse_cli_args(["start_from_latest_state" | Rest], C) ->
parse_cli_args(Rest, C#config{ start_from_latest_state = true });
parse_cli_args(["init" | Rest], C)->
parse_cli_args(["init" | Rest], C) ->
parse_cli_args(Rest, C#config{ init = true });
parse_cli_args(["internal_api_secret", Secret | Rest], C)
when length(Secret) >= ?INTERNAL_API_SECRET_MIN_LEN ->
@@ -450,7 +450,7 @@ parse_cli_args(["tx_validators", Num | Rest], C) ->
parse_cli_args(Rest, C#config{ tx_validators = list_to_integer(Num) });
parse_cli_args(["post_tx_timeout", Num | Rest], C) ->
parse_cli_args(Rest, C#config { post_tx_timeout = list_to_integer(Num) });
parse_cli_args(["tx_propagation_parallelization", Num|Rest], C) ->
parse_cli_args(["tx_propagation_parallelization", Num | Rest], C) ->
parse_cli_args(Rest, C#config{ tx_propagation_parallelization = list_to_integer(Num) });
parse_cli_args(["max_connections", Num | Rest], C) ->
parse_cli_args(Rest, C#config{ max_connections = list_to_integer(Num) });
@@ -528,7 +528,7 @@ parse_cli_args(["cm_poll_interval", Num | Rest], C) ->
parse_cli_args(["cm_peer", Peer | Rest], C = #config{ cm_peers = Ps }) ->
case ar_util:safe_parse_peer(Peer) of
{ok, ValidPeer} ->
- parse_cli_args(Rest, C#config{ cm_peers = [ValidPeer|Ps] });
+ parse_cli_args(Rest, C#config{ cm_peers = [ValidPeer | Ps] });
{error, _} ->
io:format("Peer ~p is invalid.~n", [Peer]),
parse_cli_args(Rest, C)
@@ -557,7 +557,7 @@ start(Config) ->
% Set logger to output all levels of logs to the console
% when running in a dumb terminal.
logger:add_handler(console, logger_std_h, #{level => all});
- _->
+ _ ->
ok
end,
case Config#config.init of
@@ -595,7 +595,7 @@ start(normal, _Args) ->
chars_limit => 16256,
max_size => 8128,
depth => 256,
- template => [time," [",level,"] ",mfa,":",line," ",msg,"\n"]
+ template => [time, " [", level, "] ", mfa, ":", line, " ", msg, "\n"]
},
logger:set_handler_config(default, formatter, {logger_formatter, LoggerFormatterConsole}),
logger:set_handler_config(default, level, error),
@@ -630,7 +630,7 @@ start(normal, _Args) ->
depth => 256,
legacy_header => false,
single_line => true,
- template => [time," [",level,"] ",mfa,":",line," ",msg,"\n"]
+ template => [time, " [", level, "] ", mfa, ":", line, " ", msg, "\n"]
},
logger:set_handler_config(disk_log, formatter, {logger_formatter, LoggerFormatterDisk}),
logger:set_application_level(arweave, Level),
@@ -814,11 +814,10 @@ commandline_parser_test_() ->
Addr = crypto:strong_rand_bytes(32),
Tests =
[
{"peer 1.2.3.4 peer 5.6.7.8:9", #config.peers, [{5,6,7,8,9},{1,2,3,4,1984}]},
{"peer 1.2.3.4 peer 5.6.7.8:9", #config.peers, [{5, 6, 7, 8, 9}, {1, 2, 3, 4, 1984}]},
{"mine", #config.mine, true},
{"port 22", #config.port, 22},
{"mining_addr "
++ binary_to_list(ar_util:encode(Addr)), #config.mining_addr, Addr}
{"mining_addr " ++ binary_to_list(ar_util:encode(Addr)), #config.mining_addr, Addr}
],
X = string:split(string:join([ L || {L, _, _} <- Tests ], " "), " ", all),
C = parse_cli_args(X, #config{}),
41 changes: 22 additions & 19 deletions apps/arweave/src/ar_arql_db.erl
@@ -247,7 +247,9 @@ handle_call({select_txs_by, Opts}, _, #{ conn := Conn } = State) ->
{Time, Reply} = timer:tc(fun() ->
case sql_fetchall(Conn, SQL, Params, ?DRIVER_TIMEOUT) of
Rows when is_list(Rows) ->
- lists:map(fun tx_map/1, Rows)
+ {ok, lists:map(fun tx_map/1, Rows)};
+ {error, Reason} ->
+ {error, Reason}
end
end),
record_query_time(select_txs_by, Time),
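This hunk goes slightly beyond formatting: the reply becomes a tagged tuple, so a failing query now surfaces as {error, Reason} instead of crashing the gen_server on an unmatched case clause. A caller would then match both shapes — a hypothetical sketch (the exported wrapper and handle_txs/1 are assumed, not shown in this diff):

    case ar_arql_db:select_txs_by(Opts) of
        {ok, TXs} ->
            handle_txs(TXs);
        {error, Reason} ->
            ?LOG_WARNING([{event, select_txs_by_failed}, {reason, Reason}])
    end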
@@ -259,7 +261,7 @@ handle_call({select_block_by_tx_id, TXID}, _, State) ->
{Time, Reply} = timer:tc(fun() ->
case ar_sqlite3:step(Stmt, ?DRIVER_TIMEOUT) of
{row, Row} -> {ok, block_map(Row)};
- done -> not_found
+ done -> {error, not_found}
end
end),
ar_sqlite3:reset(Stmt, ?DRIVER_TIMEOUT),
@@ -268,31 +270,32 @@

handle_call({select_tags_by_tx_id, TXID}, _, State) ->
#{ select_tags_by_tx_id_stmt := Stmt } = State,
- ok = ar_sqlite3:bind(Stmt, [TXID], ?DRIVER_TIMEOUT),
{Time, Reply} = timer:tc(fun() ->
case stmt_fetchall(Stmt, [TXID], ?DRIVER_TIMEOUT) of
Rows when is_list(Rows) ->
- lists:map(fun tags_map/1, Rows)
+ {ok, lists:map(fun tags_map/1, Rows)};
+ {error, Reason} ->
+ {error, Reason}
end
end),
record_query_time(select_tags_by_tx_id, Time),
{reply, Reply, State};

handle_call({eval_legacy_arql, Query}, _, #{ conn := Conn } = State) ->
{Time, {Reply, _SQL, _Params}} = timer:tc(fun() ->
- case catch eval_legacy_arql_where_clause(Query) of
- {WhereClause, Params} ->
- SQL = lists:concat([
- "SELECT tx.id FROM tx ",
- "JOIN block ON tx.block_indep_hash = block.indep_hash ",
- "WHERE ", WhereClause,
- " ORDER BY block.height DESC, tx.id DESC"
- ]),
- case sql_fetchall(Conn, SQL, Params, ?DRIVER_TIMEOUT) of
- Rows when is_list(Rows) ->
- {lists:map(fun([TXID]) -> TXID end, Rows), SQL, Params}
- end;
- bad_query ->
- {bad_query, 'n/a', 'n/a'}
+ try
+ {WhereClause, Params} = eval_legacy_arql_where_clause(Query),
+ SQL = lists:concat([
+ "SELECT tx.id FROM tx ",
+ "JOIN block ON tx.block_indep_hash = block.indep_hash ",
+ "WHERE ", WhereClause,
+ " ORDER BY block.height DESC, tx.id DESC"
+ ]),
+ Rows = sql_fetchall(Conn, SQL, Params, ?DRIVER_TIMEOUT),
+ {ok, {lists:map(fun([TXID]) -> TXID end, Rows), SQL, Params}}
+ catch
+ _:_ -> {error, {bad_query, 'n/a', 'n/a'}}
end
end),
record_query_time(eval_legacy_arql, Time),
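The move from `case catch Expr of` to `try ... catch` is the idiomatic half of this hunk: a bare `catch` folds thrown values and errors into the ordinary return value, so the old code could not reliably distinguish a real `{WhereClause, Params}` result from an exception. In a shell:

    1> catch throw(bad_query).
    bad_query
    2> try throw(bad_query) catch _:_ -> {error, bad_query} end.
    {error,bad_query}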
@@ -551,7 +554,7 @@ eval_legacy_arql_where_clause({equals, Key, Value})
"tx.id IN (SELECT tx_id FROM tag WHERE name = ? and value = ?)",
[Key, Value]
};
- eval_legacy_arql_where_clause({'and',E1,E2}) ->
+ eval_legacy_arql_where_clause({'and', E1, E2}) ->
{E1WhereClause, E1Params} = eval_legacy_arql_where_clause(E1),
{E2WhereClause, E2Params} = eval_legacy_arql_where_clause(E2),
{
@@ -564,7 +567,7 @@ eval_legacy_arql_where_clause({'and',E1,E2}) ->
]),
E1Params ++ E2Params
};
- eval_legacy_arql_where_clause({'or',E1,E2}) ->
+ eval_legacy_arql_where_clause({'or', E1, E2}) ->
{E1WhereClause, E1Params} = eval_legacy_arql_where_clause(E1),
{E2WhereClause, E2Params} = eval_legacy_arql_where_clause(E2),
{
2 changes: 1 addition & 1 deletion apps/arweave/src/ar_arql_middleware.erl
@@ -1,5 +1,5 @@
-module(ar_arql_middleware).
- -behavior(cowboy_middleware).
+ -behaviour(cowboy_middleware).

-export([execute/2]).
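Both spellings compile — `behavior` is accepted as a synonym — but OTP itself uses the British `-behaviour`, and lint rules follow suit. For reference, a minimal cowboy_middleware skeleton (illustrative module name; a pass-through implementation):

    -module(noop_middleware).
    -behaviour(cowboy_middleware).

    -export([execute/2]).

    %% Hand the request to the next middleware unchanged.
    execute(Req, Env) ->
        {ok, Req, Env}.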

2 changes: 1 addition & 1 deletion apps/arweave/src/ar_base32.erl
@@ -62,7 +62,7 @@ encode_binary(<<B1:8, B2:8, B3:8, B4:8, B5:8, Ls/bits>>, A) ->

-compile({inline, [{b32e, 1}]}).
b32e(X) ->
- element(X+1, {
+ element(X + 1, {
$a, $b, $c, $d, $e, $f, $g, $h, $i, $j, $k, $l, $m,
$n, $o, $p, $q, $r, $s, $t, $u, $v, $w, $x, $y, $z,
$2, $3, $4, $5, $6, $7, $8, $9
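The `X + 1` is spacing only. element/2 over a literal tuple is a constant-time lookup table, and Erlang tuples are 1-indexed, so base32 value 0 maps to $a:

    1> element(0 + 1, {$a, $b, $c}).
    97
    2> $a.
    97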
20 changes: 10 additions & 10 deletions apps/arweave/src/ar_bench_packing.erl
@@ -71,7 +71,7 @@ run_benchmark(Test, TotalMegaBytes, JIT, LargePages, HardwareAES, VDF) ->
erlang:system_time() div 1000000000,
Test, TotalMegaBytes, JIT, LargePages, HardwareAES, VDF,
Init, Total]),

file:write(File, Output),
io:format("~n"),
io:format(Output),
@@ -109,7 +109,7 @@ generate_input(TotalMegaBytes, Root, RewardAddress) ->

Spora25Filename = spora_2_5_filename(TotalMegaBytes),
io:format("~s", [Spora25Filename]),

{ok, Spora25FileHandle} = file:open(Spora25Filename, [write, binary]),
ar_bench_timer:record({wall},
fun dirty_test/4, [
@@ -140,7 +140,7 @@ write_random_data(UnpackedFilename, TotalBytes) ->
write_chunks(File, TotalBytes),
file:close(File).
write_chunks(File, TotalBytes) ->
- ChunkSize = 1024*1024, % 1MB
+ ChunkSize = 1024 * 1024, % 1MB
RemainingBytes = TotalBytes,
write_chunks_loop(File, RemainingBytes, ChunkSize).
write_chunks_loop(_File, 0, _) ->
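Only the head of the loop's terminating clause is visible in this collapsed hunk; presumably the recursive clause writes up to ChunkSize bytes and recurses. A sketch under that assumption (random data per the function name, exact payload unknown):

    write_chunks_loop(File, RemainingBytes, ChunkSize) when RemainingBytes > 0 ->
        Bytes = min(RemainingBytes, ChunkSize),
        ok = file:write(File, crypto:strong_rand_bytes(Bytes)),
        write_chunks_loop(File, RemainingBytes - Bytes, ChunkSize).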
@@ -262,7 +262,7 @@ dirty_test({TotalMegaBytes, _, _, _} = Permutation, WorkerFun, Args, NumWorkers)
Workers = [spawn_monitor(
fun() -> dirty_worker(
N,
- Permutation, 
+ Permutation,
WorkerFun,
Args,
WorkerSize * (N - 1),
@@ -272,7 +272,7 @@
[
receive
{'DOWN', Ref, process, Pid, _Result} -> erlang:demonitor(Ref), ok
- after 
+ after
60000 -> timeout
end || {Pid, Ref} <- Workers
],
@@ -299,7 +299,7 @@ baseline_pack_chunks(WorkerID,
_, JIT, LargePages, HardwareAES
} = Permutation,
{
- RandomXState, UnpackedFileHandle, PackedFileHandle, 
+ RandomXState, UnpackedFileHandle, PackedFileHandle,
Root, RewardAddress, PackingType
} = Args,
Offset, Size) ->
@@ -318,7 +318,7 @@
io:format("Error reading file: ~p~n", [Reason]),
0
end,
- baseline_pack_chunks(WorkerID, Permutation, Args, Offset+ChunkSize, RemainingSize).
+ baseline_pack_chunks(WorkerID, Permutation, Args, Offset + ChunkSize, RemainingSize).

%% --------------------------------------------------------------------------------------------
%% Baseline Repacking Test
@@ -345,7 +345,7 @@
JIT, LargePages, HardwareAES),
{ok, RepackedChunk} =ar_mine_randomx:randomx_encrypt_chunk_nif(
RandomXState, RepackKey, UnpackedChunk, RepackingRounds,
- JIT, LargePages, HardwareAES), 
+ JIT, LargePages, HardwareAES),
file:pwrite(RepackedFileHandle, Offset, RepackedChunk),
(Size - ChunkSize);
eof ->
@@ -354,7 +354,7 @@
io:format("Error reading file: ~p~n", [Reason]),
0
end,
- baseline_repack_chunks(WorkerID, Permutation, Args, Offset+ChunkSize, RemainingSize).
+ baseline_repack_chunks(WorkerID, Permutation, Args, Offset + ChunkSize, RemainingSize).

%% --------------------------------------------------------------------------------------------
%% NIF Repacking Test
@@ -387,7 +387,7 @@ nif_repack_chunks(WorkerID,
io:format("Error reading file: ~p~n", [Reason]),
0
end,
- nif_repack_chunks(WorkerID, Permutation, Args, Offset+ChunkSize, RemainingSize).
+ nif_repack_chunks(WorkerID, Permutation, Args, Offset + ChunkSize, RemainingSize).

%% --------------------------------------------------------------------------------------------
%% Helpers
2 changes: 1 addition & 1 deletion apps/arweave/src/ar_bench_timer.erl
@@ -62,7 +62,7 @@ get_avg(Times) when is_list(Times) ->
end;
get_avg(Key) ->
get_avg(get_times(Key)).

get_times(Key) ->
[Match || [Match] <- ets:match(total_time, {{Key, '_'}, '$1'})].
get_timing_keys() ->
4 changes: 2 additions & 2 deletions apps/arweave/src/ar_blacklist_middleware.erl
@@ -101,13 +101,13 @@ reset_rate_limit(TableID, IPAddr, Path) ->
end.

increment_ip_addr(IPAddr, Req) ->
- case ets:whereis(?MODULE) of 
+ case ets:whereis(?MODULE) of
undefined -> pass;
_ -> update_ip_addr(IPAddr, Req, 1)
end.

decrement_ip_addr(IPAddr, Req) ->
- case ets:whereis(?MODULE) of 
+ case ets:whereis(?MODULE) of
undefined -> pass;
_ -> update_ip_addr(IPAddr, Req, -1)
end.
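Both functions guard on ets:whereis/1, which returns undefined when the named table has not been created yet — avoiding the badarg a direct table operation would raise during startup or shutdown. The shape, generically (table name and operation are placeholders):

    bump(Key) ->
        case ets:whereis(my_counters) of
            undefined -> pass; %% table not created yet
            _Tid -> ets:update_counter(my_counters, Key, 1)
        end.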
18 changes: 7 additions & 11 deletions apps/arweave/src/ar_block.erl
@@ -52,8 +52,7 @@ block_field_size_limit(B) ->
Check = (byte_size(B#block.nonce) =< 512) and
(byte_size(B#block.previous_block) =< 48) and
(byte_size(integer_to_binary(B#block.timestamp)) =< ?TIMESTAMP_FIELD_SIZE_LIMIT) and
- (byte_size(integer_to_binary(B#block.last_retarget))
- =< ?TIMESTAMP_FIELD_SIZE_LIMIT) and
+ (byte_size(integer_to_binary(B#block.last_retarget)) =< ?TIMESTAMP_FIELD_SIZE_LIMIT) and
(byte_size(integer_to_binary(B#block.diff)) =< DiffBytesLimit) and
(byte_size(integer_to_binary(B#block.height)) =< 20) and
(byte_size(B#block.hash) =< 48) and
@@ -249,9 +248,8 @@ compute_next_vdf_difficulty(PrevB) ->
{0, 0},
HistoryPart
),
- NewVDFDifficulty =
- (VDFIntervalTotal * VDFDifficulty) div IntervalTotal,
- EMAVDFDifficulty = (9*VDFDifficulty + NewVDFDifficulty) div 10,
+ NewVDFDifficulty = (VDFIntervalTotal * VDFDifficulty) div IntervalTotal,
+ EMAVDFDifficulty = (9 * VDFDifficulty + NewVDFDifficulty) div 10,
?LOG_DEBUG([{event, vdf_difficulty_retarget},
{height, Height},
{old_vdf_difficulty, VDFDifficulty},
@@ -484,8 +482,7 @@ generate_block_data_segment_base(B) ->
integer_to_binary(ScheduledRateDividend),
integer_to_binary(ScheduledRateDivisor),
integer_to_binary(B#block.packing_2_5_threshold),
- integer_to_binary(B#block.strict_data_split_threshold)
- | Props
+ integer_to_binary(B#block.strict_data_split_threshold) | Props
];
false ->
Props
@@ -514,8 +511,8 @@ generate_block_data_segment_base(B) ->
%% of the two recall ranges.
get_recall_range(H0, PartitionNumber, PartitionUpperBound) ->
RecallRange1Offset = binary:decode_unsigned(binary:part(H0, 0, 8), big),
- RecallRange1Start = PartitionNumber * ?PARTITION_SIZE
- + RecallRange1Offset rem min(?PARTITION_SIZE, PartitionUpperBound),
+ RecallRange1Start = PartitionNumber * ?PARTITION_SIZE +
+ RecallRange1Offset rem min(?PARTITION_SIZE, PartitionUpperBound),
RecallRange2Start = binary:decode_unsigned(H0, big) rem PartitionUpperBound,
{RecallRange1Start, RecallRange2Start}.
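Moving the `+` to the end of the first line is layout only: `rem` binds tighter than `+` in Erlang, so both versions add the reduced offset to PartitionNumber * ?PARTITION_SIZE. In a shell:

    1> 10 + 7 rem 5.
    12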

@@ -584,8 +581,7 @@ generate_size_tagged_list_from_txs(TXs, Height) ->
End = Pos + DataSize,
case Height >= ar_fork:height_2_5() of
true ->
- Padding = ar_tx:get_weave_size_increase(DataSize, Height)
- - DataSize,
+ Padding = ar_tx:get_weave_size_increase(DataSize, Height) - DataSize,
%% Encode the padding information in the Merkle tree.
case Padding > 0 of
true ->