Improve perf_SUITE test (#445)

* Improve perf_SUITE test

The update test is refactored so as not to generate a large KV list, which previously dominated the test's memory utilisation.

The update and get tests are changed to do a HEAD before each operation, which emulates how this will work in Riak.
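As a sketch, the per-operation pattern now looks like this (using the suite's testutil helpers; Book, Bucket and Key stand in for the real arguments):

    %% HEAD first (fetch metadata from the ledger), then the real operation,
    %% mirroring how Riak is expected to drive leveled
    {ok, _Head} = testutil:book_riakhead(Book, Bucket, Key),
    {ok, _Obj}  = testutil:book_riakget(Book, Bucket, Key).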

* Revert default setting change

* Don't pre-calculate key list

For fetches, this reduces the memory required by the test process rather than the database (and the consequent distortion to the measured results).
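A sketch of the change's shape (do_fetch/3 is a hypothetical stand-in for the real get or head call): instead of materialising every key up front, a key-generating fun is applied inside the loop, so only one key is live at a time:

    %% Before: KeysToFetch = lists:map(KeyFun, lists:seq(1, Fetches)) held
    %% the whole key list in the test process for the duration of the run.
    %% After: each key is generated on demand and becomes garbage immediately.
    lists:foreach(
        fun(I) ->
            K = KeyFun(I),
            do_fetch(Bookie, Bucket, K)   % hypothetical fetch helper
        end,
        lists:seq(1, Fetches)).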

* Tidy ++ in tests

Removes some rogue results from the profile: A ++ B compiles to a call to erlang:'++'/2, which can surface as a separate entry in the eprof output.

* Update testutil.erl

* Test fixes

* Tidy generate_chunk for profiling

* Revert "Tidy generate_chunk for profiling"

This reverts commit 1f6cff446ca6b9855f1e3aa732b32e0e5c14c9a5.

* Resize profile test

Commit acf30599e9, parent 7b5b18ed06.
Martin Sumner, 2024-09-02 11:17:35 +01:00, committed by GitHub.
5 changed files with 181 additions and 81 deletions

Changed file 1 of 5:

@@ -302,13 +302,7 @@ split_builder_speed_tester() ->
     Timings =
         lists:map(
             fun(HashList) ->
-                SlotCount =
-                    case length(HashList) of
-                        0 ->
-                            0;
-                        L ->
-                            min(128, max(2, (L - 1) div 512))
-                    end,
+                SlotCount = min(128, max(2, (length(HashList) - 1) div 512)),
                 InitTuple = list_to_tuple(lists:duplicate(SlotCount, [])),
                 {MTC, SlotHashes} =
                     timer:tc(
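The simplification leans on Erlang's div truncating toward zero: for an empty HashList, (0 - 1) div 512 is 0, so max(2, 0) clamps SlotCount to 2 (two empty slots) rather than the old 0, and the explicit zero clause can go. Illustrative shell check:

    1> (0 - 1) div 512.   % div truncates toward zero
    0
    2> min(128, max(2, 0)).
    2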

Changed file 2 of 5:

@@ -32,8 +32,12 @@
     ]).
 
 %% Exported for testing purposes
--export([riak_metadata_to_binary/2,
-            riak_extract_metadata/2]).
+-export(
+    [
+        riak_metadata_to_binary/2,
+        riak_extract_metadata/2,
+        get_indexes_from_siblingmetabin/2
+    ]).
 
 -define(MAGIC, 53). % riak_kv -> riak_object
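The newly exported get_indexes_from_siblingmetabin/2 is what lets the perf suite rebuild index remove-specs from a HEAD response; roughly, as used later in this commit (Head being the result of a book_riakhead call):

    %% Extract the sibling metadata binary from a HEAD result, then pull
    %% out the current index entries so they can be re-added as specs
    {{SibMetaBin, _Vclock, _Hash, size}, _LMS} =
        leveled_head:riak_extract_metadata(Head, size),
    Indexes = leveled_head:get_indexes_from_siblingmetabin(SibMetaBin, []),
    RemoveSpc = [{add, Fld, Trm} || {Fld, Trm} <- Indexes].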

Changed file 3 of 5:

@@ -74,7 +74,7 @@
 % At o(10) trillion keys behaviour may become increasingly
 % difficult to predict.
--if(OTP_RELEASE >= 25).
+-if(?OTP_RELEASE >= 25).
 -if(length(?LEVEL_SCALEFACTOR) /= ?MAX_LEVELS).
 -error("length ?LEVEL_SCALEFACTOR differs from ?MAX_LEVELS").
 -endif.
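Why the one-character fix matters (a hedged note): without the ?, OTP_RELEASE is read as a bare atom, and in Erlang's term order atoms compare greater than any integer, so the condition was likely always true regardless of release; ?OTP_RELEASE expands to the real release number (predefined since OTP 21). Shell check of the ordering:

    1> 'OTP_RELEASE' >= 25.   % atoms sort after all numbers
    true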

Changed file 4 of 5:

@@ -9,6 +9,7 @@
 -define(PEOPLE_INDEX, <<"people_bin">>).
 -define(MINI_QUERY_DIVISOR, 8).
 -define(RGEX_QUERY_DIVISOR, 32).
+-define(PUT_PAUSE, 40).
 
 -ifndef(performance).
 -define(performance, riak_ctperf).
@@ -92,13 +93,13 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
         fun(ListID) ->
             fun() ->
                 RandInt = leveled_rand:uniform(IndexCount - 1),
-                IntIndex = "integer" ++ integer_to_list(ListID) ++ "_int",
-                BinIndex = "binary" ++ integer_to_list(ListID) ++ "_bin",
-                [{add, list_to_binary(IntIndex), RandInt},
+                IntIndex = ["integer", integer_to_list(ListID), "_int"],
+                BinIndex = ["binary", integer_to_list(ListID), "_bin"],
+                [{add, iolist_to_binary(IntIndex), RandInt},
                 {add, ?PEOPLE_INDEX, list_to_binary(random_people_index())},
-                {add, list_to_binary(IntIndex), RandInt + 1},
-                {add, list_to_binary(BinIndex), <<RandInt:32/integer>>},
-                {add, list_to_binary(BinIndex), <<(RandInt + 1):32/integer>>}]
+                {add, iolist_to_binary(IntIndex), RandInt + 1},
+                {add, iolist_to_binary(BinIndex), <<RandInt:32/integer>>},
+                {add, iolist_to_binary(BinIndex), <<(RandInt + 1):32/integer>>}]
             end
         end,
@@ -140,7 +141,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
     GetMemoryTracker = memory_tracking(get, 1000),
     GetAccountant = accounting(get, 3000, ProfileList),
     TotalGetTime =
-        random_fetches(get, Bookie1, Bucket, KeyCount, GetFetches),
+        random_fetches(riakget, Bookie1, Bucket, KeyCount div 2, GetFetches),
     ok = stop_accounting(GetAccountant),
     {MT2, MP2, MB2} = stop_tracker(GetMemoryTracker),
@@ -235,7 +236,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
     UpdateMemoryTracker = memory_tracking(update, 1000),
     UpdateAccountant = accounting(update, 1000, ProfileList),
     TotalUpdateTime =
-        rotate_chunk(Bookie1, <<"UpdBucket">>, KeyCount div 50, ObjSize),
+        rotate_chunk(Bookie1, <<"UpdBucket">>, KeyCount div 100, ObjSize, 2),
     ok = stop_accounting(UpdateAccountant),
     {MT6, MP6, MB6} = stop_tracker(UpdateMemoryTracker),
@@ -360,29 +361,103 @@ profile_app(Pids, ProfiledFun, P) ->
     eprof:stop_profiling(),
     eprof:log(atom_to_list(P) ++ ".log"),
-    eprof:analyze(total, [{filter, [{time, 150000}]}]),
+    eprof:analyze(total, [{filter, [{time, 160000}]}]),
     eprof:stop(),
     {ok, Analysis} = file:read_file(atom_to_list(P) ++ ".log"),
     io:format(user, "~n~s~n", [Analysis])
     .
 
-rotate_chunk(Bookie, Bucket, KeyCount, ObjSize) ->
+rotate_chunk(Bookie, Bucket, KeyCount, ObjSize, IdxCount) ->
     ct:log(
         ?INFO,
         "Rotating an ObjList ~w - "
         "time includes object generation",
         [KeyCount]),
-    V1 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
-    V2 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
-    V3 = base64:encode(leveled_rand:rand_bytes(ObjSize)),
     {TC, ok} =
         timer:tc(
             fun() ->
-                testutil:rotation_withnocheck(
-                    Bookie, Bucket, KeyCount, V1, V2, V3)
+                rotation_withnocheck(
+                    Bookie, Bucket, KeyCount, ObjSize, IdxCount
+                )
             end),
     TC div 1000.
 
+rotation_with_prefetch(_Book, _B, 0, _Value, _IdxCnt) ->
+    garbage_collect(),
+    ok;
+rotation_with_prefetch(Book, B, Count, Value, IdxCnt) ->
+    H = erlang:phash2(Count),
+    H1 = H band 127,
+    H2 = (H bsr 7) band 127,
+    H3 = (H bsr 14) band 127,
+    H4 = (H bsr 21) band 63,
+    K = <<H1:8/integer, H2:8/integer, H3:8/integer, H4:8/integer>>,
+    IndexGen = testutil:get_randomindexes_generator(IdxCnt),
+    RemoveSpc =
+        case testutil:book_riakhead(Book, B, K) of
+            not_found ->
+                [];
+            {ok, Head} ->
+                {{SibMetaBin, _Vclock, _Hash, size}, _LMS}
+                    = leveled_head:riak_extract_metadata(Head, size),
+                lists:map(
+                    fun({Fld, Trm}) -> {add, Fld, Trm} end,
+                    leveled_head:get_indexes_from_siblingmetabin(
+                        SibMetaBin, []
+                    )
+                )
+        end,
+    {O, DeltaSpecs} =
+        testutil:set_object(B, K, Value, IndexGen, RemoveSpc),
+    case testutil:book_riakput(Book, O, DeltaSpecs) of
+        ok ->
+            ok;
+        pause ->
+            timer:sleep(?PUT_PAUSE),
+            pause
+    end,
+    rotation_with_prefetch(Book, B, Count - 1, Value, IdxCnt).
+
+rotation_withnocheck(Book, B, NumberOfObjects, ObjSize, IdxCnt) ->
+    rotation_with_prefetch(
+        Book,
+        B,
+        NumberOfObjects,
+        base64:encode(leveled_rand:rand_bytes(ObjSize)),
+        IdxCnt
+    ),
+    rotation_with_prefetch(
+        Book,
+        B,
+        NumberOfObjects,
+        base64:encode(leveled_rand:rand_bytes(ObjSize)),
+        IdxCnt
+    ),
+    rotation_with_prefetch(
+        Book,
+        B,
+        NumberOfObjects,
+        base64:encode(leveled_rand:rand_bytes(ObjSize)),
+        IdxCnt
+    ),
+    rotation_with_prefetch(
+        Book,
+        B,
+        NumberOfObjects,
+        base64:encode(leveled_rand:rand_bytes(ObjSize)),
+        IdxCnt
+    ),
+    rotation_with_prefetch(
+        Book,
+        B,
+        NumberOfObjects,
+        base64:encode(leveled_rand:rand_bytes(ObjSize)),
+        IdxCnt
+    ),
+    ok.
+
 generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
     testutil:generate_objects(
         CountPerList,
@@ -430,7 +505,7 @@ time_load_chunk(
             ok ->
                 ThisProcess! {TC, 0};
             pause ->
-                timer:sleep(40),
+                timer:sleep(?PUT_PAUSE),
                 ThisProcess ! {TC + 40000, 1}
         end
     end
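For context, the pause handling implements the bookie's put back-pressure: a put may return pause, in which case the loader sleeps and charges the pause to the measured time. A variant sketch that derives the microsecond figure from the macro instead of the hard-coded 40000 (assumed equivalent while ?PUT_PAUSE is 40):

    case testutil:book_riakput(Bookie, Obj, Spcs) of
        ok ->
            ThisProcess ! {TC, 0};
        pause ->
            timer:sleep(?PUT_PAUSE),                      % back off
            ThisProcess ! {TC + (?PUT_PAUSE * 1000), 1}   % count pause in µs
    end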
@@ -487,10 +562,9 @@ counter(Bookie, estimate) ->
 
 random_fetches(FetchType, Bookie, Bucket, ObjCount, Fetches) ->
-    KeysToFetch =
-        lists:map(
-            fun(I) ->
-                Twenty = ObjCount div 5,
+    Twenty = ObjCount div 5,
+    KeyFun =
+        fun(I) ->
             case I rem 5 of
                 1 ->
                     testutil:fixed_bin_key(
@@ -499,22 +573,27 @@ random_fetches(FetchType, Bookie, Bucket, ObjCount, Fetches) ->
                     testutil:fixed_bin_key(leveled_rand:uniform(Twenty))
             end
         end,
-            lists:seq(1, Fetches)
-        ),
     {TC, ok} =
         timer:tc(
             fun() ->
                 lists:foreach(
-                    fun(K) ->
+                    fun(I) ->
+                        K = KeyFun(I),
                         {ok, _} =
                             case FetchType of
+                                riakget ->
+                                    {ok, _} =
+                                        testutil:book_riakhead(
+                                            Bookie, Bucket, K
+                                        ),
+                                    testutil:book_riakget(Bookie, Bucket, K);
                                 get ->
                                     testutil:book_riakget(Bookie, Bucket, K);
                                 head ->
                                     testutil:book_riakhead(Bookie, Bucket, K)
                             end
                     end,
-                    KeysToFetch
+                    lists:seq(1, Fetches)
                 )
             end
         ),
@@ -530,7 +609,7 @@ random_queries(Bookie, Bucket, IDs, IdxCnt, MaxRange, IndexesReturned) ->
         fun() ->
             ID = leveled_rand:uniform(IDs),
             BinIndex =
-                list_to_binary("binary" ++ integer_to_list(ID) ++ "_bin"),
+                iolist_to_binary(["binary", integer_to_list(ID), "_bin"]),
             Twenty = IdxCnt div 5,
             RI = leveled_rand:uniform(MaxRange),
             [Start, End] =
@@ -615,21 +694,21 @@ profile_fun(
     fun() ->
         random_queries(
             Bookie, Bucket, 10, IndexCount, QuerySize,
-            IndexesReturned div ?MINI_QUERY_DIVISOR)
+            (IndexesReturned * 2) div ?MINI_QUERY_DIVISOR)
     end;
 profile_fun(
     {query, QuerySize},
     {Bookie, Bucket, _KeyCount, _ObjSize, IndexCount, IndexesReturned}) ->
     fun() ->
         random_queries(
-            Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned)
+            Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned * 2)
     end;
 profile_fun(
     regex_query,
     {Bookie, Bucket, _KeyCount, _ObjSize, _IndexCount, IndexesReturned}) ->
     fun() ->
         random_people_queries(
-            Bookie, Bucket, IndexesReturned div ?RGEX_QUERY_DIVISOR)
+            Bookie, Bucket, (IndexesReturned * 2) div ?RGEX_QUERY_DIVISOR)
     end;
 profile_fun(
     {head, HeadFetches},
@@ -655,7 +734,7 @@ profile_fun(
     update,
     {Bookie, _Bucket, KeyCount, ObjSize, _IndexCount, _IndexesReturned}) ->
     fun() ->
-        rotate_chunk(Bookie, <<"ProfileB">>, KeyCount div 50, ObjSize)
+        rotate_chunk(Bookie, <<"ProfileB">>, KeyCount div 100, ObjSize, 2)
     end;
 profile_fun(
     CounterFold,
@@ -665,7 +744,7 @@ profile_fun(
         full ->
             20;
         estimate ->
-            40;
+            50;
         guess ->
             100
     end,

Changed file 5 of 5:

@@ -295,8 +295,11 @@ reset_filestructure(RootPath) when is_list(RootPath) ->
     reset_filestructure(0, RootPath).
 
 reset_filestructure(Wait, RootPath) ->
-    io:format("Waiting ~w ms to give a chance for all file closes " ++
-                "to complete~n", [Wait]),
+    io:format(
+        "Waiting ~w ms to give a chance for all file closes "
+        "to complete~n",
+        [Wait]
+    ),
     timer:sleep(Wait),
     filelib:ensure_dir(RootPath ++ "/journal/"),
     filelib:ensure_dir(RootPath ++ "/ledger/"),
@@ -311,8 +314,11 @@ wait_for_compaction(Bookie) ->
         false ->
             false;
         true ->
-            io:format("Loop ~w waiting for journal "
-                ++ "compaction to complete~n", [X]),
+            io:format(
+                "Loop ~w waiting for journal "
+                "compaction to complete~n",
+                [X]
+            ),
             timer:sleep(5000),
             F(Bookie)
     end end,
@@ -446,11 +452,15 @@ get_compressiblevalue() ->
     Selector = [{1, S1}, {2, S2}, {3, S3}, {4, S4},
                 {5, S5}, {6, S6}, {7, S7}, {8, S8}],
     L = lists:seq(1, 1024),
-    lists:foldl(fun(_X, Acc) ->
-                    {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
-                    Acc ++ Str end,
-                "",
-                L).
+    iolist_to_binary(
+        lists:foldl(
+            fun(_X, Acc) ->
+                {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
+                [Str|Acc] end,
+            [""],
+            L
+        )
+    ).
 
 generate_smallobjects(Count, KeyNumber) ->
     generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(512)).
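The rework above is the classic fix for quadratic list building: Acc ++ Str copies the whole accumulator on every iteration (quadratic over 1024 rounds), while consing [Str|Acc] is constant-time per step, with a single iolist_to_binary/1 at the end. The part order is reversed, which is immaterial for randomly selected filler, and the return type changes from a string to a binary. A minimal sketch of the pattern:

    %% Build a binary from Parts without quadratic list appends
    build(Parts) ->
        iolist_to_binary(
            lists:foldl(fun(P, Acc) -> [P | Acc] end, [], Parts)).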
@@ -547,16 +557,22 @@ set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
 
 set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, IndexesNotToRemove) ->
     IdxSpecs = IndexGen(),
     Indexes =
-        lists:map(fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
-                    IdxSpecs ++ IndexesNotToRemove),
+        lists:map(
+            fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
+            lists:flatten([IndexesNotToRemove, IdxSpecs])
+        ),
     Obj = {Bucket,
            Key,
            Value,
-           IdxSpecs ++
-                lists:map(fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
-                            Indexes2Remove),
-           [{<<"MDK">>, "MDV" ++ Key},
-            {<<"MDK2">>, "MDV" ++ Key},
+           lists:flatten(
+               IdxSpecs,
+               lists:map(
+                   fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
+                   Indexes2Remove
+               )
+           ),
+           [{<<"MDK">>, iolist_to_binary([<<"MDV">>, Key])},
+            {<<"MDK2">>, iolist_to_binary([<<"MDV">>, Key])},
            {?MD_LASTMOD, os:timestamp()},
            {?MD_INDEX, Indexes}]},
     {B1, K1, V1, DeltaSpecs, MD} = Obj,
@@ -637,8 +653,10 @@ get_value(ObjectBin) ->
             <<SibLength:32/integer, Rest2/binary>> = SibsBin,
             <<ContentBin:SibLength/binary, _MetaBin/binary>> = Rest2,
             case ContentBin of
-                <<0, ContentBin0/binary>> ->
-                    binary_to_term(ContentBin0)
+                <<0:8/integer, ContentBin0/binary>> ->
+                    binary_to_term(ContentBin0);
+                <<1:8/integer, ContentAsIs/binary>> ->
+                    ContentAsIs
             end;
         N ->
             io:format("SibCount of ~w with ObjectBin ~w~n", [N, ObjectBin]),
@@ -696,8 +714,8 @@ get_randomindexes_generator(Count) ->
         lists:map(
             fun(X) ->
                 {add,
-                    list_to_binary("idx" ++ integer_to_list(X) ++ "_bin"),
-                    list_to_binary(get_randomdate() ++ get_randomname())}
+                    iolist_to_binary(["idx", integer_to_list(X), "_bin"]),
+                    iolist_to_binary([get_randomdate(), get_randomname()])}
             end,
             lists:seq(1, Count))
     end,
@@ -845,7 +863,7 @@ put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V) ->
         end,
         % Note that order in the SpecL is important, as
         % check_indexed_objects, needs to find the latest item added
-        {{K, NewSpecs ++ AddSpc}, AccOut}
+        {{K, lists:append(NewSpecs, AddSpc)}, AccOut}
     end,
     {RplKSpecL, Pauses} = lists:mapfoldl(MapFun, 0, KSpecL),
     io:format(
@@ -939,16 +957,21 @@ compact_and_wait(Book) ->
 
 compact_and_wait(Book, WaitForDelete) ->
     ok = leveled_bookie:book_compactjournal(Book, 30000),
     F = fun leveled_bookie:book_islastcompactionpending/1,
-    lists:foldl(fun(X, Pending) ->
+    lists:foldl(
+        fun(X, Pending) ->
             case Pending of
                 false ->
                     false;
                 true ->
-                    io:format("Loop ~w waiting for journal "
-                        ++ "compaction to complete~n", [X]),
+                    io:format(
+                        "Loop ~w waiting for journal "
+                        "compaction to complete~n",
+                        [X]
+                    ),
                     timer:sleep(20000),
                     F(Book)
-            end end,
+            end
+        end,
         true,
         lists:seq(1, 15)),
     io:format("Waiting for journal deletes~n"),