Improve perf_SUITE test (#445)
* Improve perf_SUITE test. The update test is refactored so that it does not generate a large KV list, which dominates the memory utilisation. The update and get tests are changed to do a head before each operation - which emulates how this will work in RIAK.
* Revert default setting change
* Don't pre-calculate key list. For fetches - reduces memory required for the test process, not the database (and the consequent distortion to measured results)
* Tidy ++ in tests. Removes some rogue results from the profile
* Update testutil.erl
* Test fixes
* Tidy generate_chunk for profiling
* Revert "Tidy generate_chunk for profiling". This reverts commit 1f6cff446ca6b9855f1e3aa732b32e0e5c14c9a5.
* Resize profile test
Parent: 7b5b18ed06
Commit: acf30599e9
5 changed files with 181 additions and 81 deletions
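The commit message describes changing the update and get tests to issue a head request before each operation, emulating Riak's behaviour of reading metadata before acting on an object. A minimal sketch of that access pattern, assuming leveled_bookie:book_head/4 and book_get/4 are available and Tag matches the tag the objects were stored with (hypothetical helper, not part of this change):

%% Hypothetical sketch of the head-before-each-operation pattern the
%% commit message describes: HEAD first (metadata only), then the full GET.
head_then_get(Bookie, Bucket, Key, Tag) ->
    case leveled_bookie:book_head(Bookie, Bucket, Key, Tag) of
        {ok, _HeadMetadata} ->
            %% Object exists - now fetch the full value
            leveled_bookie:book_get(Bookie, Bucket, Key, Tag);
        not_found ->
            not_found
    end.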
@@ -295,8 +295,11 @@ reset_filestructure(RootPath) when is_list(RootPath) ->
     reset_filestructure(0, RootPath).
 
 reset_filestructure(Wait, RootPath) ->
-    io:format("Waiting ~w ms to give a chance for all file closes " ++
-                 "to complete~n", [Wait]),
+    io:format(
+        "Waiting ~w ms to give a chance for all file closes "
+        "to complete~n",
+        [Wait]
+    ),
     timer:sleep(Wait),
     filelib:ensure_dir(RootPath ++ "/journal/"),
     filelib:ensure_dir(RootPath ++ "/ledger/"),
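In the replacement, the runtime ++ between the two format-string literals is dropped; adjacent string literals in Erlang are concatenated by the compiler, so the same format string is produced with no work at runtime. An illustrative comparison (hypothetical function names, not part of the diff):

%% Both return the same string; the second form is joined at compile time
%% rather than built with ++ at runtime.
fmt_runtime() -> "Waiting ~w ms " ++ "to complete~n".
fmt_compile() -> "Waiting ~w ms " "to complete~n".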
@@ -311,8 +314,11 @@ wait_for_compaction(Bookie) ->
                     false ->
                         false;
                     true ->
-                        io:format("Loop ~w waiting for journal "
-                            ++ "compaction to complete~n", [X]),
+                        io:format(
+                            "Loop ~w waiting for journal "
+                            "compaction to complete~n",
+                            [X]
+                        ),
                         timer:sleep(5000),
                         F(Bookie)
                 end end,
@@ -446,11 +452,15 @@ get_compressiblevalue() ->
     Selector = [{1, S1}, {2, S2}, {3, S3}, {4, S4},
                 {5, S5}, {6, S6}, {7, S7}, {8, S8}],
     L = lists:seq(1, 1024),
-    lists:foldl(fun(_X, Acc) ->
-                    {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
-                    Acc ++ Str end,
-                "",
-                L).
+    iolist_to_binary(
+        lists:foldl(
+            fun(_X, Acc) ->
+                {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
+                [Str|Acc] end,
+            [""],
+            L
+        )
+    ).
 
 generate_smallobjects(Count, KeyNumber) ->
     generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(512)).
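The old fold appended each randomly selected string to the right of the accumulator with ++, which copies the accumulator on every iteration and is quadratic in the number of chunks; the new version prepends (constant time per step) and flattens once with iolist_to_binary/1. The order of chunks is reversed relative to appending, which does not matter here because each chunk is chosen at random. A stripped-down illustration of the pattern (hypothetical name, not part of testutil.erl):

%% Prepend each chunk, then flatten once at the end - O(n) overall.
build_value(Chunks) ->
    iolist_to_binary(
        lists:foldl(fun(Chunk, Acc) -> [Chunk | Acc] end, [], Chunks)).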
@@ -547,16 +557,22 @@ set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
 set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, IndexesNotToRemove) ->
     IdxSpecs = IndexGen(),
     Indexes =
-        lists:map(fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
-                    IdxSpecs ++ IndexesNotToRemove),
+        lists:map(
+            fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
+            lists:flatten([IndexesNotToRemove, IdxSpecs])
+        ),
     Obj = {Bucket,
             Key,
             Value,
-            IdxSpecs ++
-                lists:map(fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
-                            Indexes2Remove),
-            [{<<"MDK">>, "MDV" ++ Key},
-                {<<"MDK2">>, "MDV" ++ Key},
+            lists:flatten(
+                IdxSpecs,
+                lists:map(
+                    fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
+                    Indexes2Remove
+                )
+            ),
+            [{<<"MDK">>, iolist_to_binary([<<"MDV">>, Key])},
+                {<<"MDK2">>, iolist_to_binary([<<"MDV">>, Key])},
                 {?MD_LASTMOD, os:timestamp()},
                 {?MD_INDEX, Indexes}]},
     {B1, K1, V1, DeltaSpecs, MD} = Obj,
@@ -637,8 +653,10 @@ get_value(ObjectBin) ->
             <<SibLength:32/integer, Rest2/binary>> = SibsBin,
             <<ContentBin:SibLength/binary, _MetaBin/binary>> = Rest2,
             case ContentBin of
-                <<0, ContentBin0/binary>> ->
-                    binary_to_term(ContentBin0)
+                <<0:8/integer, ContentBin0/binary>> ->
+                    binary_to_term(ContentBin0);
+                <<1:8/integer, ContentAsIs/binary>> ->
+                    ContentAsIs
             end;
         N ->
             io:format("SibCount of ~w with ObjectBin ~w~n", [N, ObjectBin]),
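The extra clause covers both forms of the leading type byte that this code can encounter: 0 for a value encoded with term_to_binary/1 and 1 for a value stored as-is. A small round-trip sketch of that framing (hypothetical helpers, mirroring the two clauses above rather than any function in the codebase):

%% Hypothetical encode/decode pair for the 0/1 type byte convention
%% matched in get_value/1 above.
encode_content(Bin) when is_binary(Bin) ->
    <<1:8/integer, Bin/binary>>;
encode_content(Term) ->
    <<0:8/integer, (term_to_binary(Term))/binary>>.

decode_content(<<0:8/integer, Encoded/binary>>) -> binary_to_term(Encoded);
decode_content(<<1:8/integer, AsIs/binary>>) -> AsIs.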
@@ -696,8 +714,8 @@ get_randomindexes_generator(Count) ->
             lists:map(
                 fun(X) ->
                     {add,
-                        list_to_binary("idx" ++ integer_to_list(X) ++ "_bin"),
-                        list_to_binary(get_randomdate() ++ get_randomname())}
+                        iolist_to_binary(["idx", integer_to_list(X), "_bin"]),
+                        iolist_to_binary([get_randomdate(), get_randomname()])}
                 end,
                 lists:seq(1, Count))
         end,
@@ -845,7 +863,7 @@ put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V) ->
                 end,
             % Note that order in the SpecL is important, as
             % check_indexed_objects, needs to find the latest item added
-            {{K, NewSpecs ++ AddSpc}, AccOut}
+            {{K, lists:append(NewSpecs, AddSpc)}, AccOut}
         end,
     {RplKSpecL, Pauses} = lists:mapfoldl(MapFun, 0, KSpecL),
     io:format(
|
|||
compact_and_wait(Book, WaitForDelete) ->
|
||||
ok = leveled_bookie:book_compactjournal(Book, 30000),
|
||||
F = fun leveled_bookie:book_islastcompactionpending/1,
|
||||
lists:foldl(fun(X, Pending) ->
|
||||
case Pending of
|
||||
false ->
|
||||
false;
|
||||
true ->
|
||||
io:format("Loop ~w waiting for journal "
|
||||
++ "compaction to complete~n", [X]),
|
||||
timer:sleep(20000),
|
||||
F(Book)
|
||||
end end,
|
||||
true,
|
||||
lists:seq(1, 15)),
|
||||
lists:foldl(
|
||||
fun(X, Pending) ->
|
||||
case Pending of
|
||||
false ->
|
||||
false;
|
||||
true ->
|
||||
io:format(
|
||||
"Loop ~w waiting for journal "
|
||||
"compaction to complete~n",
|
||||
[X]
|
||||
),
|
||||
timer:sleep(20000),
|
||||
F(Book)
|
||||
end
|
||||
end,
|
||||
true,
|
||||
lists:seq(1, 15)),
|
||||
io:format("Waiting for journal deletes~n"),
|
||||
timer:sleep(WaitForDelete).
|
||||
|
|
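The reformatted fold is a bounded polling loop: it calls book_islastcompactionpending/1 up to 15 times, sleeping 20 seconds between attempts, until compaction is no longer pending. The same pattern written as a recursive helper, purely as an illustration (hypothetical function, not part of this change):

%% Retry CheckFun up to Retries times, pausing PauseMs between attempts,
%% until it reports that nothing is pending.
wait_until_not_pending(_CheckFun, _PauseMs, 0) ->
    still_pending;
wait_until_not_pending(CheckFun, PauseMs, Retries) ->
    case CheckFun() of
        false -> ok;
        true ->
            timer:sleep(PauseMs),
            wait_until_not_pending(CheckFun, PauseMs, Retries - 1)
    end.

The loop above is roughly equivalent to wait_until_not_pending(fun() -> leveled_bookie:book_islastcompactionpending(Book) end, 20000, 15).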