Develop 3.1 otp27 (#443)
* Initial testing of OTP27

* Profiles for testing in OTP 27
parent 86c49bec00
commit 7ac99f05c7
6 changed files with 119 additions and 33 deletions
@@ -1,7 +1,5 @@
{minimum_otp_vsn, "22.3"}.

{erl_opts, [warnings_as_errors]}.

{xref_checks,
    [undefined_function_calls,undefined_functions,
        locals_not_used,
@@ -22,11 +20,14 @@
        {plugins, [rebar_eqc]}
    ]},
    {test, [{extra_src_dirs, ["test/end_to_end", "test/property"]}
        ]}
    ]},
    {perf_full, [{erl_opts, [{d, perf_full}]}]},
    {perf_mini, [{erl_opts, [{d, perf_mini}]}]},
    {perf_prof, [{erl_opts, [{d, perf_prof}]}]}
]}.

{deps, [
    {lz4, ".*", {git, "https://github.com/nhs-riak/erlang-lz4", {branch, "nhse-develop"}}},
    {lz4, ".*", {git, "https://github.com/nhs-riak/erlang-lz4", {branch, "nhse-develop-otp27"}}},
    {zstd, ".*", {git, "https://github.com/nhs-riak/zstd-erlang", {branch, "nhse-develop"}}}
]}.
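The three new profiles each add a single compile-time define to erl_opts ({d, perf_full}, {d, perf_mini}, {d, perf_prof}). As a rough sketch of how such a define reaches the preprocessor (not part of this commit; the module below is invented), building under a profile, for example with rebar3 as perf_mini ct, selects the matching -ifdef branch at compile time:

%% profile_macro_example.erl (illustrative only)
-module(profile_macro_example).
-export([which_tests/0]).

-ifdef(perf_full).
which_tests() -> full.       %% compiled under the perf_full profile
-else.
-ifdef(perf_mini).
which_tests() -> mini.       %% compiled under the perf_mini profile
-else.
which_tests() -> default.    %% no perf define: the ordinary ct run
-endif.
-endif.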
BIN rebar3
Binary file not shown.
@@ -289,4 +289,45 @@ test_bloom(N, Runs) ->
        "fpr ~.3f with bytes-per-key ~.3f~n",
        [N, round(TSa), TSb / PosChecks, TSc / (Pos + Neg), FPR, BytesPerKey]).

split_builder_speed_test_() ->
    {timeout, 60, fun split_builder_speed_tester/0}.

split_builder_speed_tester() ->
    N = 40000,
    Runs = 50,
    ListOfHashLists =
        lists:map(fun(_X) -> get_hashlist(N * 2) end, lists:seq(1, Runs)),

    Timings =
        lists:map(
            fun(HashList) ->
                SlotCount =
                    case length(HashList) of
                        0 ->
                            0;
                        L ->
                            min(128, max(2, (L - 1) div 512))
                    end,
                InitTuple = list_to_tuple(lists:duplicate(SlotCount, [])),
                {MTC, SlotHashes} =
                    timer:tc(
                        fun map_hashes/3, [HashList, InitTuple, SlotCount]
                    ),
                {BTC, _Bloom} =
                    timer:tc(
                        fun build_bloom/2, [SlotHashes, SlotCount]
                    ),
                {MTC, BTC}
            end,
            ListOfHashLists
        ),
    {MTs, BTs} = lists:unzip(Timings),
    io:format(
        user,
        "Total time in microseconds for map_hashlist ~w build_bloom ~w~n",
        [lists:sum(MTs), lists:sum(BTs)]
    ).

-endif.
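The timings above rely on timer:tc/3, which applies a fun to an argument list and returns {ElapsedMicroseconds, Result}. A tiny self-contained sketch of the same pattern (not part of the commit; names invented):

%% tc_example.erl (illustrative only)
-module(tc_example).
-export([time_sum/1]).

time_sum(N) ->
    %% timer:tc(Fun, Args) runs apply(Fun, Args) and measures the elapsed
    %% wall-clock time in microseconds.
    {Micros, Sum} = timer:tc(fun lists:sum/1, [lists:seq(1, N)]),
    io:format(user, "summing ~w integers took ~w microseconds~n", [N, Micros]),
    {Micros, Sum}.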
@@ -168,7 +168,7 @@
        handle_info/2,
        terminate/2,
        code_change/3,
        format_status/2]).
        format_status/1]).

-export([
        pcl_snapstart/1,
@@ -1230,15 +1230,22 @@ terminate(Reason, _State=#state{is_snapshot=Snap}) when Snap == true ->
terminate(Reason, _State) ->
    leveled_log:log(p0011, [Reason]).

format_status(normal, [_PDict, State]) ->
    State;
format_status(terminate, [_PDict, State]) ->
format_status(Status) ->
    case maps:get(reason, Status, normal) of
        terminate ->
            State = maps:get(state, Status),
            maps:update(
                state,
                State#state{
                    manifest = redacted,
                    levelzero_cache = redacted,
                    levelzero_index = redacted,
                    levelzero_astree = redacted}.
                    levelzero_astree = redacted},
                Status
            );
        _ ->
            Status
    end.

code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
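The rewritten callback follows the map-based Module:format_status/1 form that gen_server and gen_statem have accepted since OTP 25, replacing the older format_status/2. A minimal sketch, assuming a made-up gen_server with one sensitive field in its state record (none of the names below are leveled's):

%% redact_status_example.erl (illustrative only)
-module(redact_status_example).
-behaviour(gen_server).
-export([start_link/1, init/1, handle_call/3, handle_cast/2, format_status/1]).

-record(state, {secret, requests = 0}).

start_link(Secret) -> gen_server:start_link(?MODULE, Secret, []).

init(Secret) -> {ok, #state{secret = Secret}}.

handle_call(_Msg, _From, State) ->
    {reply, ok, State#state{requests = State#state.requests + 1}}.

handle_cast(_Msg, State) -> {noreply, State}.

%% The callback receives a status map (keys such as state, message, reason
%% and log may be present); whatever is returned is what sys:get_status/1
%% and crash reports will show, so the sensitive field is overwritten here.
format_status(Status) ->
    maps:map(
        fun(state, State) -> State#state{secret = redacted};
           (_Key, Value) -> Value
        end,
        Status).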
@@ -1993,9 +2000,10 @@ format_status_test() ->
            max_inmemory_tablesize=1000,
            sst_options=#sst_options{}}),
    {status, PCL, {module, gen_server}, SItemL} = sys:get_status(PCL),
    S = lists:keyfind(state, 1, lists:nth(5, SItemL)),
    {data,[{"State", S}]} = lists:nth(3, lists:nth(5, SItemL)),
    true = is_integer(array:size(element(2, S#state.manifest))),
    ST = format_status(terminate, [dict:new(), S]),
    Status = format_status(#{reason => terminate, state => S}),
    ST = maps:get(state, Status),
    ?assertMatch(redacted, ST#state.manifest),
    ?assertMatch(redacted, ST#state.levelzero_cache),
    ?assertMatch(redacted, ST#state.levelzero_index),
@@ -103,7 +103,7 @@
        callback_mode/0,
        terminate/3,
        code_change/4,
        format_status/2]).
        format_status/1]).

%% states
-export([starting/3,
@@ -926,11 +926,21 @@ terminate(Reason, _StateName, State) ->
code_change(_OldVsn, StateName, State, _Extra) ->
    {ok, StateName, State}.

format_status(normal, [_PDict, _, State]) ->
    State;
format_status(terminate, [_PDict, _, State]) ->
format_status(Status) ->
    case maps:get(reason, Status, normal) of
        terminate ->
            State = maps:get(state, Status),
            maps:update(
                state,
                State#state{
                    blockindex_cache = redacted, fetch_cache = redacted}.
                    blockindex_cache = redacted,
                    fetch_cache = redacted},
                Status
            );
        _ ->
            Status
    end.

%%%============================================================================
@@ -3940,10 +3950,11 @@ fetch_status_test() ->
    {ok, Pid, {FirstKey, LastKey}, _Bloom} =
        testsst_new(RP, Filename, 1, KVList1, length(KVList1), native),
    {status, Pid, {module, gen_statem}, SItemL} = sys:get_status(Pid),
    S = lists:keyfind(state, 1, lists:nth(5, SItemL)),
    {data,[{"State", {reader, S}}]} = lists:nth(3, lists:nth(5, SItemL)),
    true = is_integer(array:size(S#state.fetch_cache)),
    true = is_integer(array:size(element(2, S#state.blockindex_cache))),
    ST = format_status(terminate, [dict:new(), starting, S]),
    Status = format_status(#{reason => terminate, state => S}),
    ST = maps:get(state, Status),
    ?assertMatch(redacted, ST#state.blockindex_cache),
    ?assertMatch(redacted, ST#state.fetch_cache),
    ok = sst_close(Pid),
@@ -3,15 +3,33 @@
-define(INFO, info).
-export([all/0, suite/0]).
-export([
    riak_ctperf/1, riak_fullperf/1, riak_profileperf/1
    riak_ctperf/1, riak_fullperf/1, riak_profileperf/1, riak_miniperf/1
]).

all() -> [riak_ctperf].
-ifdef(perf_full).
all() -> [riak_fullperf].
-else.
-ifdef(perf_mini).
all() -> [riak_miniperf].
-else.
-ifdef(perf_prof).
all() -> [riak_profileperf].
-else.
all() -> [riak_ctperf].
-endif.
-endif.
-endif.

suite() -> [{timetrap, {hours, 16}}].

riak_fullperf(_Config) ->
    riak_fullperf(2048, zstd, as_store).

riak_miniperf(_Config) ->
    Bucket = {<<"SensibleBucketTypeName">>, <<"SensibleBucketName0">>},
    R2A = riak_load_tester(Bucket, 2000000, 2048, [], zstd, as_store),
    output_result(R2A).

riak_fullperf(ObjSize, PM, LC) ->
    Bucket = {<<"SensibleBucketTypeName">>, <<"SensibleBucketName0">>},
    R2A = riak_load_tester(Bucket, 2000000, ObjSize, [], PM, LC),
@@ -33,7 +51,7 @@ riak_profileperf(_Config) ->
        {<<"SensibleBucketTypeName">>, <<"SensibleBucketName0">>},
        2000000,
        2048,
        [load, head, get, query, mini_query, full, guess, estimate, update],
        [load, full],
        zstd,
        as_store
    ).
@@ -172,8 +190,9 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
                P ->
                    P
            end,
            io:format(user, "~nProfile ~p:~n", [P]),
            ProFun = profile_fun(P0, ProfileData),
            profile_test(Bookie1, ProFun)
            profile_test(Bookie1, ProFun, P)
        end,
        ProfileList),
@@ -192,12 +211,14 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
        SSTPids, CDBPids}.

profile_test(Bookie, ProfileFun) ->
profile_test(Bookie, ProfileFun, P) ->
    {Inker, Pcl, SSTPids, PClerk, CDBPids, IClerk} = get_pids(Bookie),
    TestPid = self(),
    profile_app(
        [TestPid, Bookie, Inker, IClerk, Pcl, PClerk] ++ SSTPids ++ CDBPids,
        ProfileFun).
        ProfileFun,
        P
    ).

get_pids(Bookie) ->
    {ok, Inker, Pcl} = leveled_bookie:book_returnactors(Bookie),
@@ -250,7 +271,7 @@ memory_usage() ->
        element(2, lists:keyfind(processes, 1, MemoryUsage)),
        element(2, lists:keyfind(binary, 1, MemoryUsage))}.

profile_app(Pids, ProfiledFun) ->
profile_app(Pids, ProfiledFun, P) ->

    eprof:start(),
    eprof:start_profiling(Pids),
@@ -258,8 +279,12 @@ profile_app(Pids, ProfiledFun) ->
    ProfiledFun(),

    eprof:stop_profiling(),
    eprof:analyze(total),
    eprof:stop().
    eprof:log(atom_to_list(P) ++ ".log"),
    eprof:analyze(total, [{filter, [{calls, 100}, {time, 200000}]}]),
    eprof:stop(),
    {ok, Analysis} = file:read_file(atom_to_list(P) ++ ".log"),
    io:format(user, "~n~s~n", [Analysis])
    .

size_estimate_summary(Bookie) ->
    Loops = 10,
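For reference, the core eprof flow used above reduces to start, start_profiling, run, stop_profiling, analyze, stop. A minimal sketch wrapping an arbitrary zero-arity fun (not part of the commit; names invented, and without the log-to-file and filter options the suite adds):

%% eprof_example.erl (illustrative only)
-module(eprof_example).
-export([profile/1]).

profile(Fun) when is_function(Fun, 0) ->
    %% Start the profiling server; it may already be running.
    case eprof:start() of
        {ok, _Pid} -> ok;
        {error, {already_started, _Pid}} -> ok
    end,
    %% Profile only the calling process while Fun runs.
    profiling = eprof:start_profiling([self()]),
    Result = Fun(),
    eprof:stop_profiling(),
    %% Print accumulated call counts and times, then shut eprof down.
    eprof:analyze(total),
    eprof:stop(),
    Result.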