Merge branch 'develop-3.1' into develop-3.4

commit e417bb4743

6 changed files with 727 additions and 378 deletions
@@ -21,9 +21,9 @@
     ]},
     {test, [{extra_src_dirs, ["test/end_to_end", "test/property"]}
     ]},
-    {perf_full, [{erl_opts, [{d, perf_full}]}]},
-    {perf_mini, [{erl_opts, [{d, perf_mini}]}]},
-    {perf_prof, [{erl_opts, [{d, perf_prof}]}]}
+    {perf_full, [{erl_opts, [{d, performance, riak_fullperf}]}]},
+    {perf_mini, [{erl_opts, [{d, performance, riak_miniperf}]}]},
+    {perf_prof, [{erl_opts, [{d, performance, riak_profileperf}]}]}
 ]}.
 
 {deps, [
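Note on the hunk above: the three profiles previously injected three unrelated flag macros; they now inject a single `performance` macro whose value names the requested run. A minimal sketch of how a `{d, Macro, Value}` profile define reaches a module (the `rebar3` invocation is an assumption, not part of this diff):

%% Sketch: with the profile below active, e.g. via `rebar3 as perf_mini ct`,
%% modules compile as if they contained
%%   -define(performance, riak_miniperf).
%% so a suite's all/0 can expand ?performance to choose the test case.
{profiles,
    [{perf_mini, [{erl_opts, [{d, performance, riak_miniperf}]}]}]}.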
@@ -213,11 +213,12 @@ breaking_folds(_Config) ->
     % Find all keys index, and then same again but stop at a midpoint using a
     % throw
     {async, IdxFolder} =
-        leveled_bookie:book_indexfold(Bookie1,
-                                        list_to_binary("Bucket"),
-                                        {fun testutil:foldkeysfun/3, []},
-                                        {"idx1_bin", "#", "|"},
-                                        {true, undefined}),
+        leveled_bookie:book_indexfold(
+            Bookie1,
+            list_to_binary("Bucket"),
+            {fun testutil:foldkeysfun/3, []},
+            {<<"idx1_bin">>, <<"#">>, <<"|">>},
+            {true, undefined}),
     KeyList1 = lists:reverse(IdxFolder()),
     io:format("Index fold with result size ~w~n", [length(KeyList1)]),
     true = KeyCount == length(KeyList1),
@@ -235,11 +236,12 @@ breaking_folds(_Config) ->
             end
         end,
     {async, IdxFolderToMidK} =
-        leveled_bookie:book_indexfold(Bookie1,
-                                        list_to_binary("Bucket"),
-                                        {FoldKeyThrowFun, []},
-                                        {"idx1_bin", "#", "|"},
-                                        {true, undefined}),
+        leveled_bookie:book_indexfold(
+            Bookie1,
+            list_to_binary("Bucket"),
+            {FoldKeyThrowFun, []},
+            {<<"idx1_bin">>, <<"#">>, <<"|">>},
+            {true, undefined}),
     CatchingFold =
         fun(AsyncFolder) ->
             try
@@ -261,10 +263,8 @@ breaking_folds(_Config) ->
             [{K, Size}|Acc]
         end,
     {async, HeadFolder} =
-        leveled_bookie:book_headfold(Bookie1,
-                                        ?RIAK_TAG,
-                                        {HeadFoldFun, []},
-                                        true, true, false),
+        leveled_bookie:book_headfold(
+            Bookie1, ?RIAK_TAG, {HeadFoldFun, []}, true, true, false),
     KeySizeList1 = lists:reverse(HeadFolder()),
     io:format("Head fold with result size ~w~n", [length(KeySizeList1)]),
     true = KeyCount == length(KeySizeList1),
@@ -472,11 +472,9 @@ small_load_with2i(_Config) ->
     testutil:check_forobject(Bookie1, TestObject),
     ObjectGen = testutil:get_compressiblevalue_andinteger(),
     IndexGen = testutil:get_randomindexes_generator(8),
-    ObjL1 = testutil:generate_objects(10000,
-                                        uuid,
-                                        [],
-                                        ObjectGen,
-                                        IndexGen),
+    ObjL1 =
+        testutil:generate_objects(
+            10000, uuid, [], ObjectGen, IndexGen),
     testutil:riakload(Bookie1, ObjL1),
     ChkList1 = lists:sublist(lists:sort(ObjL1), 100),
     testutil:check_forlist(Bookie1, ChkList1),
@@ -486,7 +484,7 @@ small_load_with2i(_Config) ->
     IdxQ1 = {index_query,
                 "Bucket",
                 {fun testutil:foldkeysfun/3, []},
-                {"idx1_bin", "#", "|"},
+                {<<"idx1_bin">>, <<"#">>, <<"|">>},
                 {true, undefined}},
     {async, IdxFolder} = leveled_bookie:book_returnfolder(Bookie1, IdxQ1),
     KeyList1 = lists:usort(IdxFolder()),
@@ -495,7 +493,7 @@ small_load_with2i(_Config) ->
     IdxQ2 = {index_query,
                 {"Bucket", LastKey},
                 {fun testutil:foldkeysfun/3, []},
-                {"idx1_bin", LastTerm, "|"},
+                {<<"idx1_bin">>, LastTerm, <<"|">>},
                 {false, undefined}},
     {async, IdxFolderLK} = leveled_bookie:book_returnfolder(Bookie1, IdxQ2),
     KeyList2 = lists:usort(IdxFolderLK()),
@@ -530,12 +528,14 @@ small_load_with2i(_Config) ->
                 {FoldObjectsFun, []},
                 false),
     KeyHashList2 = HTreeF2(),
-    {async, HTreeF3} = leveled_bookie:book_objectfold(Bookie1,
-                                                        ?RIAK_TAG,
-                                                        "Bucket",
-                                                        {"idx1_bin", "#", "|"},
-                                                        {FoldObjectsFun, []},
-                                                        false),
+    {async, HTreeF3} =
+        leveled_bookie:book_objectfold(
+            Bookie1,
+            ?RIAK_TAG,
+            "Bucket",
+            {<<"idx1_bin">>, <<"#">>, <<"|">>},
+            {FoldObjectsFun, []},
+            false),
     KeyHashList3 = HTreeF3(),
     true = 9901 == length(KeyHashList1), % also includes the test object
     true = 9900 == length(KeyHashList2),
@@ -585,96 +585,86 @@ small_load_with2i(_Config) ->
 
 query_count(_Config) ->
     RootPath = testutil:reset_filestructure(),
-    {ok, Book1} = leveled_bookie:book_start(RootPath,
-                                            2000,
-                                            50000000,
-                                            testutil:sync_strategy()),
+    {ok, Book1} =
+        leveled_bookie:book_start(
+            RootPath, 2000, 50000000, testutil:sync_strategy()),
     BucketBin = list_to_binary("Bucket"),
-    {TestObject, TestSpec} = testutil:generate_testobject(BucketBin,
-                                                            term_to_binary("Key1"),
-                                                            "Value1",
-                                                            [],
-                                                            [{"MDK1", "MDV1"}]),
+    {TestObject, TestSpec} =
+        testutil:generate_testobject(
+            BucketBin, term_to_binary("Key1"), "Value1", [], [{"MDK1", "MDV1"}]),
     ok = testutil:book_riakput(Book1, TestObject, TestSpec),
     testutil:check_forobject(Book1, TestObject),
     testutil:check_formissingobject(Book1, "Bucket1", "Key2"),
     testutil:check_forobject(Book1, TestObject),
-    lists:foreach(fun(_X) ->
-                        V = testutil:get_compressiblevalue(),
-                        Indexes = testutil:get_randomindexes_generator(8),
-                        SW = os:timestamp(),
-                        ObjL1 = testutil:generate_objects(10000,
-                                                            binary_uuid,
-                                                            [],
-                                                            V,
-                                                            Indexes),
-                        testutil:riakload(Book1, ObjL1),
-                        io:format("Put of 10000 objects with 8 index entries "
-                                    ++
-                                    "each completed in ~w microseconds~n",
-                                    [timer:now_diff(os:timestamp(), SW)])
-                    end,
-                    lists:seq(1, 8)),
+    lists:foreach(
+        fun(_X) ->
+            V = testutil:get_compressiblevalue(),
+            Indexes = testutil:get_randomindexes_generator(8),
+            SW = os:timestamp(),
+            ObjL1 = testutil:generate_objects(10000,
+                                                binary_uuid,
+                                                [],
+                                                V,
+                                                Indexes),
+            testutil:riakload(Book1, ObjL1),
+            io:format(
+                "Put of 10000 objects with 8 index entries "
+                "each completed in ~w microseconds~n",
+                [timer:now_diff(os:timestamp(), SW)])
+        end,
+        lists:seq(1, 8)),
     testutil:check_forobject(Book1, TestObject),
-    Total = lists:foldl(fun(X, Acc) ->
-                            IdxF = "idx" ++ integer_to_list(X) ++ "_bin",
-                            T = count_termsonindex(BucketBin,
-                                                    IdxF,
-                                                    Book1,
-                                                    ?KEY_ONLY),
-                            io:format("~w terms found on index ~s~n",
-                                        [T, IdxF]),
-                            Acc + T
-                        end,
-                        0,
-                        lists:seq(1, 8)),
-    ok = case Total of
-            640000 ->
-                ok
-         end,
-    Index1Count = count_termsonindex(BucketBin,
-                                        "idx1_bin",
-                                        Book1,
-                                        ?KEY_ONLY),
+    Total =
+        lists:foldl(
+            fun(X, Acc) ->
+                IdxF = "idx" ++ integer_to_list(X) ++ "_bin",
+                T =
+                    count_termsonindex(
+                        BucketBin, list_to_binary(IdxF), Book1, ?KEY_ONLY),
+                io:format("~w terms found on index ~s~n", [T, IdxF]),
+                Acc + T
+            end,
+            0,
+            lists:seq(1, 8)),
+    true = Total == 640000,
+    Index1Count =
+        count_termsonindex(
+            BucketBin, <<"idx1_bin">>, Book1, ?KEY_ONLY),
     ok = leveled_bookie:book_close(Book1),
-    {ok, Book2} = leveled_bookie:book_start(RootPath,
-                                            1000,
-                                            50000000,
-                                            testutil:sync_strategy()),
-    Index1Count = count_termsonindex(BucketBin,
-                                        "idx1_bin",
-                                        Book2,
-                                        ?KEY_ONLY),
+    {ok, Book2} =
+        leveled_bookie:book_start(
+            RootPath, 1000, 50000000, testutil:sync_strategy()),
+    Index1Count =
+        count_termsonindex(
+            BucketBin, <<"idx1_bin">>, Book2, ?KEY_ONLY),
     NameList = testutil:name_list(),
-    TotalNameByName = lists:foldl(fun({_X, Name}, Acc) ->
-                                        {ok, Regex} = re:compile("[0-9]+" ++
-                                                                    Name),
-                                        SW = os:timestamp(),
-                                        T = count_termsonindex(BucketBin,
-                                                                "idx1_bin",
-                                                                Book2,
-                                                                {false,
-                                                                    Regex}),
-                                        TD = timer:now_diff(os:timestamp(),
-                                                            SW),
-                                        io:format("~w terms found on " ++
-                                                        "index idx1 with a " ++
-                                                        "regex in ~w " ++
-                                                        "microseconds~n",
-                                                    [T, TD]),
-                                        Acc + T
-                                    end,
-                                    0,
-                                    NameList),
-    ok = case TotalNameByName of
-            Index1Count ->
-                ok
-         end,
+    TotalNameByName =
+        lists:foldl(
+            fun({_X, Name}, Acc) ->
+                {ok, Regex} =
+                    re:compile("[0-9]+" ++ Name),
+                SW = os:timestamp(),
+                T =
+                    count_termsonindex(
+                        BucketBin,
+                        list_to_binary("idx1_bin"),
+                        Book2,
+                        {false, Regex}),
+                TD = timer:now_diff(os:timestamp(), SW),
+                io:format(
+                    "~w terms found on index idx1 with a "
+                    "regex in ~w microseconds~n",
+                    [T, TD]),
+                Acc + T
+            end,
+            0,
+            NameList),
+    true = TotalNameByName == Index1Count,
     {ok, RegMia} = re:compile("[0-9]+Mia"),
     Query1 = {index_query,
                 BucketBin,
                 {fun testutil:foldkeysfun/3, []},
-                {"idx2_bin", "2000", "2000|"},
+                {<<"idx2_bin">>, <<"2000">>, <<"2000|">>},
                 {false, RegMia}},
     {async,
         Mia2KFolder1} = leveled_bookie:book_returnfolder(Book2, Query1),
@@ -682,7 +672,7 @@ query_count(_Config) ->
     Query2 = {index_query,
                 BucketBin,
                 {fun testutil:foldkeysfun/3, []},
-                {"idx2_bin", "2000", "2001"},
+                {<<"idx2_bin">>, <<"2000">>, <<"2001">>},
                 {true, undefined}},
     {async,
         Mia2KFolder2} = leveled_bookie:book_returnfolder(Book2, Query2),
@@ -705,7 +695,7 @@ query_count(_Config) ->
     Query3 = {index_query,
                 BucketBin,
                 {fun testutil:foldkeysfun/3, []},
-                {"idx2_bin", "1980", "2100"},
+                {<<"idx2_bin">>, <<"1980">>, <<"2100">>},
                 {false, RxMia2K}},
     {async,
         Mia2KFolder3} = leveled_bookie:book_returnfolder(Book2, Query3),
@@ -713,26 +703,26 @@ query_count(_Config) ->
 
     V9 = testutil:get_compressiblevalue(),
     Indexes9 = testutil:get_randomindexes_generator(8),
-    [{_RN, Obj9, Spc9}] = testutil:generate_objects(1,
-                                                    binary_uuid,
-                                                    [],
-                                                    V9,
-                                                    Indexes9),
+    [{_RN, Obj9, Spc9}] =
+        testutil:generate_objects(
+            1, binary_uuid, [], V9, Indexes9),
     ok = testutil:book_riakput(Book2, Obj9, Spc9),
-    R9 = lists:map(fun({add, IdxF, IdxT}) ->
-                        Q = {index_query,
-                                BucketBin,
-                                {fun testutil:foldkeysfun/3, []},
-                                {IdxF, IdxT, IdxT},
-                                ?KEY_ONLY},
-                        R = leveled_bookie:book_returnfolder(Book2, Q),
-                        {async, Fldr} = R,
-                        case length(Fldr()) of
-                            X when X > 0 ->
-                                {IdxF, IdxT, X}
-                        end
-                    end,
-                    Spc9),
+    R9 =
+        lists:map(
+            fun({add, IdxF, IdxT}) ->
+                Q = {index_query,
+                        BucketBin,
+                        {fun testutil:foldkeysfun/3, []},
+                        {IdxF, IdxT, IdxT},
+                        ?KEY_ONLY},
+                R = leveled_bookie:book_returnfolder(Book2, Q),
+                {async, Fldr} = R,
+                case length(Fldr()) of
+                    X when X > 0 ->
+                        {IdxF, IdxT, X}
+                end
+            end,
+            Spc9),
     Spc9Del = lists:map(fun({add, IdxF, IdxT}) -> {remove, IdxF, IdxT} end,
                         Spc9),
     ok = testutil:book_riakput(Book2, Obj9, Spc9Del),
@@ -751,44 +741,44 @@ query_count(_Config) ->
         end,
         R9),
     ok = leveled_bookie:book_close(Book2),
-    {ok, Book3} = leveled_bookie:book_start(RootPath,
-                                            2000,
-                                            50000000,
-                                            testutil:sync_strategy()),
-    lists:foreach(fun({IdxF, IdxT, X}) ->
-                        Q = {index_query,
-                                BucketBin,
-                                {fun testutil:foldkeysfun/3, []},
-                                {IdxF, IdxT, IdxT},
-                                ?KEY_ONLY},
-                        R = leveled_bookie:book_returnfolder(Book3, Q),
-                        {async, Fldr} = R,
-                        case length(Fldr()) of
-                            Y ->
-                                Y = X - 1
-                        end
-                    end,
-                    R9),
+    {ok, Book3} =
+        leveled_bookie:book_start(
+            RootPath, 2000, 50000000, testutil:sync_strategy()),
+    lists:foreach(
+        fun({IdxF, IdxT, X}) ->
+            Q = {index_query,
+                    BucketBin,
+                    {fun testutil:foldkeysfun/3, []},
+                    {IdxF, IdxT, IdxT},
+                    ?KEY_ONLY},
+            R = leveled_bookie:book_returnfolder(Book3, Q),
+            {async, Fldr} = R,
+            case length(Fldr()) of
+                Y ->
+                    Y = X - 1
+            end
+        end,
+        R9),
     ok = testutil:book_riakput(Book3, Obj9, Spc9),
     ok = leveled_bookie:book_close(Book3),
-    {ok, Book4} = leveled_bookie:book_start(RootPath,
-                                            2000,
-                                            50000000,
-                                            testutil:sync_strategy()),
-    lists:foreach(fun({IdxF, IdxT, X}) ->
-                        Q = {index_query,
-                                BucketBin,
-                                {fun testutil:foldkeysfun/3, []},
-                                {IdxF, IdxT, IdxT},
-                                ?KEY_ONLY},
-                        R = leveled_bookie:book_returnfolder(Book4, Q),
-                        {async, Fldr} = R,
-                        case length(Fldr()) of
-                            X ->
-                                ok
-                        end
-                    end,
-                    R9),
+    {ok, Book4} =
+        leveled_bookie:book_start(
+            RootPath, 2000, 50000000, testutil:sync_strategy()),
+    lists:foreach(
+        fun({IdxF, IdxT, X}) ->
+            Q = {index_query,
+                    BucketBin,
+                    {fun testutil:foldkeysfun/3, []},
+                    {IdxF, IdxT, IdxT},
+                    ?KEY_ONLY},
+            R = leveled_bookie:book_returnfolder(Book4, Q),
+            {async, Fldr} = R,
+            case length(Fldr()) of
+                X ->
+                    ok
+            end
+        end,
+        R9),
     testutil:check_forobject(Book4, TestObject),
 
     FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
@@ -803,24 +793,15 @@ query_count(_Config) ->
 
     true = sets:size(BucketSet1) == 1,
 
-    ObjList10A = testutil:generate_objects(5000,
-                                            binary_uuid,
-                                            [],
-                                            V9,
-                                            Indexes9,
-                                            "BucketA"),
-    ObjList10B = testutil:generate_objects(5000,
-                                            binary_uuid,
-                                            [],
-                                            V9,
-                                            Indexes9,
-                                            "BucketB"),
-    ObjList10C = testutil:generate_objects(5000,
-                                            binary_uuid,
-                                            [],
-                                            V9,
-                                            Indexes9,
-                                            "BucketC"),
+    ObjList10A =
+        testutil:generate_objects(
+            5000, binary_uuid, [], V9, Indexes9, "BucketA"),
+    ObjList10B =
+        testutil:generate_objects(
+            5000, binary_uuid, [], V9, Indexes9, "BucketB"),
+    ObjList10C =
+        testutil:generate_objects(
+            5000, binary_uuid, [], V9, Indexes9, "BucketC"),
     testutil:riakload(Book4, ObjList10A),
     testutil:riakload(Book4, ObjList10B),
     testutil:riakload(Book4, ObjList10C),
@@ -847,31 +828,30 @@ query_count(_Config) ->
     ok = leveled_bookie:book_close(Book5),
 
     testutil:reset_filestructure().
 
-
 
 count_termsonindex(Bucket, IdxField, Book, QType) ->
-    lists:foldl(fun(X, Acc) ->
-                    SW = os:timestamp(),
-                    ST = integer_to_list(X),
-                    ET = ST ++ "|",
-                    Q = {index_query,
-                            Bucket,
-                            {fun testutil:foldkeysfun/3, []},
-                            {IdxField, ST, ET},
-                            QType},
-                    R = leveled_bookie:book_returnfolder(Book, Q),
-                    {async, Folder} = R,
-                    Items = length(Folder()),
-                    io:format("2i query from term ~s on index ~s took " ++
-                                    "~w microseconds~n",
-                                [ST,
-                                    IdxField,
-                                    timer:now_diff(os:timestamp(), SW)]),
-                    Acc + Items
-                end,
-                0,
-                lists:seq(190, 221)).
+    lists:foldl(
+        fun(X, Acc) ->
+            SW = os:timestamp(),
+            ST = list_to_binary(integer_to_list(X)),
+            Pipe = <<"|">>,
+            ET = <<ST/binary, Pipe/binary>>,
+            Q = {index_query,
+                    Bucket,
+                    {fun testutil:foldkeysfun/3, []},
+                    {IdxField, ST, ET},
+                    QType},
+            R = leveled_bookie:book_returnfolder(Book, Q),
+            {async, Folder} = R,
+            Items = length(Folder()),
+            io:format(
+                "2i query from term ~s on index ~s took ~w microseconds~n",
+                [ST, IdxField, timer:now_diff(os:timestamp(), SW)]),
+            Acc + Items
+        end,
+        0,
+        lists:seq(190, 221)).
 
 multibucket_fold(_Config) ->
     RootPath = testutil:reset_filestructure(),
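The reworked count_termsonindex/4 above now builds its 2i range terms as binaries rather than strings. A condensed sketch of the same construction (the helper name is illustrative; integer_to_binary/1 collapses the list_to_binary(integer_to_list(X)) step used in the diff):

%% Sketch: a binary range end for a 2i prefix query. <<"|">> (0x7C)
%% sorts after digits and letters, so {ST, ET} brackets every term
%% that begins with ST.
range_for(X) when is_integer(X) ->
    ST = integer_to_binary(X),
    ET = <<ST/binary, $|>>,
    {ST, ET}.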
@@ -6,18 +6,20 @@
     riak_ctperf/1, riak_fullperf/1, riak_profileperf/1, riak_miniperf/1
     ]).
 
--ifdef(perf_full).
-all() -> [riak_fullperf].
+-define(PEOPLE_INDEX, <<"people_bin">>).
+-define(MINI_QUERY_DIVISOR, 8).
+-define(RGEX_QUERY_DIVISOR, 32).
+
+-ifndef(performance).
+-define(performance, riak_ctperf).
+-endif.
+all() -> [?performance].
+
+-if(?performance == riak_profileperf andalso ?OTP_RELEASE >= 24).
+% Requires map functions from OTP 24
+-define(ACCOUNTING, true).
+-else.
--ifdef(perf_mini).
-all() -> [riak_miniperf].
--else.
--ifdef(perf_prof).
-all() -> [riak_profileperf].
--else.
-all() -> [riak_ctperf].
--endif.
--endif.
+-define(ACCOUNTING, false).
+-endif.
 
 suite() -> [{timetrap, {hours, 16}}].
 
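The new header derives everything from the single `performance` macro: a default is supplied with -ifndef for plain `ct` runs, all/0 expands it directly, and the accounting code is additionally gated on ?OTP_RELEASE because the accountant relies on maps:intersect_with/3, which arrived in OTP 24. A self-contained sketch of the same gating pattern (module name illustrative):

%% Sketch: valued-macro plus OTP-version gating, as in the hunk above.
-module(perf_gate_demo).
-export([all/0, accounting_enabled/0]).

-ifndef(performance).
-define(performance, riak_ctperf).   % default when compiled without {d, ...}
-endif.

all() -> [?performance].

-if(?performance == riak_profileperf andalso ?OTP_RELEASE >= 24).
-define(ACCOUNTING, true).
-else.
-define(ACCOUNTING, false).
-endif.

accounting_enabled() -> ?ACCOUNTING.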
@@ -42,16 +44,16 @@ riak_fullperf(ObjSize, PM, LC) ->
     output_result(R5A),
     R5B = riak_load_tester(Bucket, 5000000, ObjSize, [], PM, LC),
     output_result(R5B),
-    R10 = riak_load_tester(Bucket, 10000000, ObjSize, [], PM, LC),
+    R10 = riak_load_tester(Bucket, 8000000, ObjSize, [], PM, LC),
     output_result(R10)
     .
 
 riak_profileperf(_Config) ->
     riak_load_tester(
         {<<"SensibleBucketTypeName">>, <<"SensibleBucketName0">>},
-        2000000,
+        1200000,
         2048,
-        [load, full],
+        [load, head, get, query, mini_query, regex_query, full, guess, estimate, update],
         zstd,
         as_store
     ).
@@ -66,6 +68,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
         "Basic riak test with KeyCount ~w ObjSize ~w PressMethod ~w Ledger ~w",
         [KeyCount, ObjSize, PM, LC]
     ),
 
+    IndexCount = 100000,
 
     GetFetches = KeyCount div 4,
@@ -92,6 +95,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
             IntIndex = "integer" ++ integer_to_list(ListID) ++ "_int",
             BinIndex = "binary" ++ integer_to_list(ListID) ++ "_bin",
             [{add, list_to_binary(IntIndex), RandInt},
+                {add, ?PEOPLE_INDEX, list_to_binary(random_people_index())},
                 {add, list_to_binary(IntIndex), RandInt + 1},
                 {add, list_to_binary(BinIndex), <<RandInt:32/integer>>},
                 {add, list_to_binary(BinIndex), <<(RandInt + 1):32/integer>>}]
@@ -100,6 +104,8 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
 
     CountPerList = KeyCount div 10,
 
+    LoadMemoryTracker = memory_tracking(load, 1000),
+    LoadAccountant = accounting(load, 10000, ProfileList),
     TC4 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 4),
     TC1 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 1),
     TC9 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 9),
@@ -110,6 +116,8 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
     TC3 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 3),
     TC7 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 7),
     TC10 = load_chunk(Bookie1, CountPerList, ObjSize, IndexGenFun, Bucket, 10),
+    ok = stop_accounting(LoadAccountant),
+    {MT0, MP0, MB0} = stop_tracker(LoadMemoryTracker),
 
     ct:log(
         ?INFO,
@@ -122,20 +130,23 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
         (TC1 + TC2 + TC3 + TC4 + TC5 + TC6 + TC7 + TC8 + TC9 + TC10) div 1000,
     ct:log(?INFO, "Total load time ~w ms", [TotalLoadTime]),
 
-    {MT0, MP0, MB0} = memory_usage(),
-
+    HeadMemoryTracker = memory_tracking(head, 1000),
+    HeadAccountant = accounting(head, 2000, ProfileList),
     TotalHeadTime =
         random_fetches(head, Bookie1, Bucket, KeyCount, HeadFetches),
-
-    {MT1, MP1, MB1} = memory_usage(),
+    ok = stop_accounting(HeadAccountant),
+    {MT1, MP1, MB1} = stop_tracker(HeadMemoryTracker),
 
+    GetMemoryTracker = memory_tracking(get, 1000),
+    GetAccountant = accounting(get, 3000, ProfileList),
     TotalGetTime =
         random_fetches(get, Bookie1, Bucket, KeyCount, GetFetches),
+    ok = stop_accounting(GetAccountant),
+    {MT2, MP2, MB2} = stop_tracker(GetMemoryTracker),
 
-    {MT2, MP2, MB2} = memory_usage(),
-
+    QueryMemoryTracker = memory_tracking(query, 1000),
+    QueryAccountant = accounting(query, 1000, ProfileList),
     QuerySize = max(10, IndexCount div 1000),
-    MiniQuerySize = max(1, IndexCount div 50000),
     TotalQueryTime =
         random_queries(
             Bookie1,
@@ -144,6 +155,12 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
             IndexCount,
             QuerySize,
             IndexesReturned),
+    ok = stop_accounting(QueryAccountant),
+    {MT3a, MP3a, MB3a} = stop_tracker(QueryMemoryTracker),
+
+    MiniQueryMemoryTracker = memory_tracking(mini_query, 1000),
+    MiniQueryAccountant = accounting(mini_query, 1000, ProfileList),
+    MiniQuerySize = max(1, IndexCount div 50000),
     TotalMiniQueryTime =
         random_queries(
             Bookie1,
@@ -151,18 +168,76 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
             10,
             IndexCount,
             MiniQuerySize,
-            IndexesReturned div 8),
+            IndexesReturned div ?MINI_QUERY_DIVISOR),
+    ok = stop_accounting(MiniQueryAccountant),
+    {MT3b, MP3b, MB3b} = stop_tracker(MiniQueryMemoryTracker),
 
-    {MT3, MP3, MB3} = memory_usage(),
+    RegexQueryMemoryTracker = memory_tracking(regex_query, 1000),
+    RegexQueryAccountant = accounting(regex_query, 2000, ProfileList),
+    RegexQueryTime =
+        random_people_queries(
+            Bookie1,
+            Bucket,
+            IndexesReturned div ?RGEX_QUERY_DIVISOR),
+    ok = stop_accounting(RegexQueryAccountant),
+    {MT3c, MP3c, MB3c} = stop_tracker(RegexQueryMemoryTracker),
 
-    {FullFoldTime, SegFoldTime} = size_estimate_summary(Bookie1),
+    GuessMemoryTracker = memory_tracking(guess, 1000),
+    GuessAccountant = accounting(guess, 1000, ProfileList),
+    {GuessTime, GuessCount} =
+        lists:foldl(
+            fun(_I, {TSAcc, CountAcc}) ->
+                {TS, Count} = counter(Bookie1, guess),
+                {TSAcc + TS, CountAcc + Count}
+            end,
+            {0, 0},
+            lists:seq(1, 60)
+        ),
+    ok = stop_accounting(GuessAccountant),
+    {MT4a, MP4a, MB4a} = stop_tracker(GuessMemoryTracker),
 
-    {MT4, MP4, MB4} = memory_usage(),
+    EstimateMemoryTracker = memory_tracking(estimate, 1000),
+    EstimateAccountant = accounting(estimate, 1000, ProfileList),
+    {EstimateTime, EstimateCount} =
+        lists:foldl(
+            fun(_I, {TSAcc, CountAcc}) ->
+                {TS, Count} = counter(Bookie1, estimate),
+                {TSAcc + TS, CountAcc + Count}
+            end,
+            {0, 0},
+            lists:seq(1, 40)
+        ),
+    ok = stop_accounting(EstimateAccountant),
+    {MT4b, MP4b, MB4b} = stop_tracker(EstimateMemoryTracker),
 
+    SegFoldTime = (GuessTime + EstimateTime) div 1000,
+
+    FullFoldMemoryTracker = memory_tracking(full, 1000),
+    FullFoldAccountant = accounting(full, 2000, ProfileList),
+    {FullFoldTime, FullFoldCount} =
+        lists:foldl(
+            fun(_I, {TSAcc, CountAcc}) ->
+                {TS, Count} = counter(Bookie1, full),
+                {TSAcc + TS, CountAcc + Count}
+            end,
+            {0, 0},
+            lists:seq(1, 5)
+        ),
+    ok = stop_accounting(FullFoldAccountant),
+    {MT5, MP5, MB5} = stop_tracker(FullFoldMemoryTracker),
+
+    ct:log(
+        info,
+        "Guess size ~w Estimate size ~w Actual size ~w",
+        [GuessCount div 60, EstimateCount div 40, FullFoldCount div 10]
+    ),
+
+    UpdateMemoryTracker = memory_tracking(update, 1000),
+    UpdateAccountant = accounting(update, 1000, ProfileList),
     TotalUpdateTime =
         rotate_chunk(Bookie1, <<"UpdBucket">>, KeyCount div 50, ObjSize),
-
-    {MT5, MP5, MB5} = memory_usage(),
+    ok = stop_accounting(UpdateAccountant),
+    {MT6, MP6, MB6} = stop_tracker(UpdateMemoryTracker),
 
     DiskSpace = lists:nth(1, string:tokens(os:cmd("du -sh riakLoad"), "\t")),
     ct:log(?INFO, "Disk space taken by test ~s", [DiskSpace]),
@@ -202,15 +277,18 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
     {KeyCount, ObjSize, {PM, LC},
         TotalLoadTime,
         TotalHeadTime, TotalGetTime,
-        TotalQueryTime, TotalMiniQueryTime, FullFoldTime, SegFoldTime,
+        TotalQueryTime, TotalMiniQueryTime, RegexQueryTime,
+        FullFoldTime div 1000, SegFoldTime,
         TotalUpdateTime,
         DiskSpace,
-        {(MT0 + MT1 + MT2 + MT3 + MT4 + MT5) div 6000000,
-            (MP0 + MP1 + MP2 + MP3 + MP4 + MP5) div 6000000,
-            (MB0 + MB1 + MB2 + MB3 + MB4 + MB5) div 6000000},
+        {(MT0 + MT1 + MT2 + MT3a + MT3b + MT3c + MT4a + MT4b + MT5 + MT6)
+                div 9,
+            (MP0 + MP1 + MP2 + MP3a + MP3b + MP3c + MP4a + MP4b + MP5 + MP6)
+                div 9,
+            (MB0 + MB1 + MB2 + MB3a + MB3b + MB3c + MB4a + MB4b + MB5 + MB6)
+                div 9},
         SSTPids, CDBPids}.
 
 
 profile_test(Bookie, ProfileFun, P) ->
     {Inker, Pcl, SSTPids, PClerk, CDBPids, IClerk} = get_pids(Bookie),
     TestPid = self(),
@@ -232,7 +310,8 @@ output_result(
     {KeyCount, ObjSize, PressMethod,
         TotalLoadTime,
         TotalHeadTime, TotalGetTime,
-        TotalQueryTime, TotalMiniQueryTime, TotalFullFoldTime, TotalSegFoldTime,
+        TotalQueryTime, TotalMiniQueryTime, RegexQueryTime,
+        TotalFullFoldTime, TotalSegFoldTime,
        TotalUpdateTime,
         DiskSpace,
         {TotalMemoryMB, ProcessMemoryMB, BinaryMemoryMB},
@@ -248,6 +327,7 @@ output_result(
         "TotalGetTime - ~w ms~n"
         "TotalQueryTime - ~w ms~n"
         "TotalMiniQueryTime - ~w ms~n"
+        "TotalRegexQueryTime - ~w ms~n"
         "TotalFullFoldTime - ~w ms~n"
         "TotalAAEFoldTime - ~w ms~n"
         "TotalUpdateTime - ~w ms~n"
@@ -257,7 +337,8 @@ output_result(
         "Closing count of CDB Files - ~w~n",
         [KeyCount, ObjSize, PressMethod,
             TotalLoadTime, TotalHeadTime, TotalGetTime,
-            TotalQueryTime, TotalMiniQueryTime, TotalFullFoldTime, TotalSegFoldTime,
+            TotalQueryTime, TotalMiniQueryTime, RegexQueryTime,
+            TotalFullFoldTime, TotalSegFoldTime,
             TotalUpdateTime,
             DiskSpace,
             TotalMemoryMB, ProcessMemoryMB, BinaryMemoryMB,
@@ -265,7 +346,6 @@ output_result(
     ).
 
 memory_usage() ->
-    garbage_collect(), % GC the test process
     MemoryUsage = erlang:memory(),
     {element(2, lists:keyfind(total, 1, MemoryUsage)),
         element(2, lists:keyfind(processes, 1, MemoryUsage)),
@@ -280,45 +360,12 @@ profile_app(Pids, ProfiledFun, P) ->
 
     eprof:stop_profiling(),
     eprof:log(atom_to_list(P) ++ ".log"),
-    eprof:analyze(total, [{filter, [{calls, 100}, {time, 200000}]}]),
+    eprof:analyze(total, [{filter, [{time, 150000}]}]),
     eprof:stop(),
     {ok, Analysis} = file:read_file(atom_to_list(P) ++ ".log"),
     io:format(user, "~n~s~n", [Analysis])
     .
 
-size_estimate_summary(Bookie) ->
-    Loops = 10,
-    ct:log(
-        ?INFO,
-        "Size Estimate Tester (SET) started with Loops ~w",
-        [Loops]
-    ),
-    {{TotalGuessTime, TotalEstimateTime, TotalCountTime},
-            {TotalEstimateVariance, TotalGuessVariance}} =
-        lists:foldl(
-            fun(_I, {{GT, ET, CT}, {AET, AGT}}) ->
-                {{GT0, ET0, CT0}, {AE0, AG0}} = size_estimate_tester(Bookie),
-                {{GT + GT0, ET + ET0, CT + CT0}, {AET + AE0, AGT + AG0}}
-            end,
-            {{0, 0, 0}, {0, 0}},
-            lists:seq(1, Loops)
-        ),
-    ct:log(
-        ?INFO,
-        "SET: MeanGuess ~w ms MeanEstimate ~w ms MeanCount ~w ms",
-        [TotalGuessTime div 10000,
-            TotalEstimateTime div 10000,
-            TotalCountTime div 10000]
-    ),
-    ct:log(
-        ?INFO,
-        "Mean variance in Estimate ~w Guess ~w",
-        [TotalEstimateVariance div Loops, TotalGuessVariance div Loops]
-    ),
-    %% Assume that segment-list folds are 10 * as common as all folds
-    {TotalCountTime div 1000, (TotalGuessTime + TotalEstimateTime) div 1000}.
-
-
 rotate_chunk(Bookie, Bucket, KeyCount, ObjSize) ->
     ct:log(
         ?INFO,
@@ -336,15 +383,6 @@ rotate_chunk(Bookie, Bucket, KeyCount, ObjSize) ->
         end),
     TC div 1000.
 
-load_chunk(Bookie, CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
-    ct:log(?INFO, "Generating and loading ObjList ~w", [Chunk]),
-    ObjList =
-        generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk),
-    {TC, ok} = timer:tc(fun() -> testutil:riakload(Bookie, ObjList) end),
-    garbage_collect(),
-    timer:sleep(2000),
-    TC.
-
 generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
     testutil:generate_objects(
         CountPerList,
@@ -354,31 +392,60 @@ generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
         Bucket
     ).
 
-size_estimate_tester(Bookie) ->
-    %% Data size test - calculate data size, then estimate data size
-    {CountTS, Count} = counter(Bookie, full),
-    {CountTSEstimate, CountEstimate} = counter(Bookie, estimate),
-    {CountTSGuess, CountGuess} = counter(Bookie, guess),
-    {GuessTolerance, EstimateTolerance} =
-        case Count of
-            C when C < 500000 ->
-                {0.20, 0.15};
-            C when C < 1000000 ->
-                {0.12, 0.1};
-            C when C < 2000000 ->
-                {0.1, 0.08};
-            _C ->
-                {0.08, 0.05}
-        end,
+load_chunk(Bookie, CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
+    ct:log(?INFO, "Generating and loading ObjList ~w", [Chunk]),
+    time_load_chunk(
+        Bookie,
+        {Bucket, base64:encode(leveled_rand:rand_bytes(ObjSize)), IndexGenFun(Chunk)},
+        (Chunk - 1) * CountPerList + 1,
+        Chunk * CountPerList,
+        0,
+        0
+    ).
 
-    true =
-        ((CountGuess / Count) > (1.0 - GuessTolerance))
-        and ((CountGuess / Count) < (1.0 + GuessTolerance)),
-    true =
-        ((CountEstimate / Count) > (1.0 - EstimateTolerance))
-        and ((CountEstimate / Count) < (1.0 + EstimateTolerance)),
-    {{CountTSGuess, CountTSEstimate, CountTS},
-        {abs(CountEstimate - Count), abs(CountGuess - Count)}}.
+time_load_chunk(
+        _Bookie, _ObjDetails, KeyNumber, TopKey, TotalTime, PC)
+        when KeyNumber > TopKey ->
+    garbage_collect(),
+    timer:sleep(2000),
+    ct:log(
+        ?INFO,
+        "Count of ~w pauses during chunk load",
+        [PC]
+    ),
+    TotalTime;
+time_load_chunk(
+        Bookie, {Bucket, Value, IndexGen}, KeyNumber, TopKey, TotalTime, PC) ->
+    ThisProcess = self(),
+    spawn(
+        fun() ->
+            {RiakObj, IndexSpecs} =
+                testutil:set_object(
+                    Bucket, testutil:fixed_bin_key(KeyNumber), Value, IndexGen, []),
+            {TC, R} =
+                timer:tc(
+                    testutil, book_riakput, [Bookie, RiakObj, IndexSpecs]
+                ),
+            case R of
+                ok ->
+                    ThisProcess! {TC, 0};
+                pause ->
+                    timer:sleep(40),
+                    ThisProcess ! {TC + 40000, 1}
+            end
+        end
+    ),
+    receive
+        {PutTime, Pause} ->
+            time_load_chunk(
+                Bookie,
+                {Bucket, Value, IndexGen},
+                KeyNumber + 1,
+                TopKey,
+                TotalTime + PutTime,
+                PC + Pause
+            )
+    end.
 
 counter(Bookie, full) ->
     {async, DataSizeCounter} =
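The new time_load_chunk/6 above runs every PUT in a throwaway process and collects the result as a message, so the long-lived test process neither accumulates garbage from object generation nor hides leveled's `pause` back-pressure. A reduced sketch of that pattern (the PutFun argument stands in for the real PUT call):

%% Sketch: time one operation in a spawned process, counting pauses,
%% as in time_load_chunk/6 above.
time_one(PutFun) ->
    Parent = self(),
    spawn(
        fun() ->
            {Micros, Res} = timer:tc(PutFun),
            case Res of
                ok ->
                    Parent ! {Micros, 0};
                pause ->
                    timer:sleep(40),              % back off as requested
                    Parent ! {Micros + 40000, 1}  % include the pause cost
            end
        end),
    receive {ElapsedMicros, Pauses} -> {ElapsedMicros, Pauses} end.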
@@ -496,6 +563,42 @@ random_queries(Bookie, Bucket, IDs, IdxCnt, MaxRange, IndexesReturned) ->
     TC div 1000.
 
 
+random_people_queries(Bookie, Bucket, IndexesReturned) ->
+    SeventiesWillowRegex =
+        "[^\\|]*\\|197[0-9]{5}\\|[^\\|]*\\|"
+        "[^\\|]*#Willow[^\\|]*\\|[^\\|]*#LS[^\\|]*",
+    %% born in the 70s with Willow as a given name
+    QueryFun =
+        fun() ->
+            Surname = get_random_surname(),
+            Range =
+                {?PEOPLE_INDEX,
+                    Surname,
+                    <<Surname/binary, 126:8/integer>>
+                },
+            {ok, TermRegex} =
+                re:compile(SeventiesWillowRegex),
+            FoldKeysFun = fun(_B, _K, Cnt) -> Cnt + 1 end,
+            {async, R} =
+                leveled_bookie:book_indexfold(
+                    Bookie,
+                    {Bucket, <<>>},
+                    {FoldKeysFun, 0},
+                    Range,
+                    {true, TermRegex}),
+            R()
+        end,
+
+    {TC, {QC, EF}} =
+        timer:tc(fun() -> run_queries(QueryFun, 0, 0, IndexesReturned) end),
+    ct:log(
+        ?INFO,
+        "Fetch of ~w index entries by regex in ~w queries took ~w ms",
+        [EF, QC, TC div 1000]
+    ),
+    TC div 1000.
+
+
 run_queries(_QueryFun, QueryCount, EntriesFound, TargetEntries)
         when EntriesFound >= TargetEntries ->
     {QueryCount, EntriesFound};
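random_people_queries/3 above leans on book_indexfold's term-regex option: the fold range is bounded by a surname prefix while the compiled regex filters the packed people term. A reduced sketch of the same call shape (bucket and index names follow this diff; the counting fold fun simply tallies matches):

%% Sketch: count index entries whose term matches a regex, per the
%% book_indexfold/5 usage above. 126 ($~) sorts above the printable
%% characters used in the terms, closing the surname range.
count_matching(Bookie, Bucket, Surname, RegexString) ->
    {ok, TermRegex} = re:compile(RegexString),
    FoldKeysFun = fun(_B, _K, Cnt) -> Cnt + 1 end,
    {async, Runner} =
        leveled_bookie:book_indexfold(
            Bookie,
            {Bucket, <<>>},
            {FoldKeysFun, 0},
            {<<"people_bin">>, Surname, <<Surname/binary, 126:8/integer>>},
            {true, TermRegex}),
    Runner().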
@@ -511,7 +614,8 @@ profile_fun(
     {Bookie, Bucket, _KeyCount, _ObjSize, IndexCount, IndexesReturned}) ->
     fun() ->
         random_queries(
-            Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned div 8)
+            Bookie, Bucket, 10, IndexCount, QuerySize,
+            IndexesReturned div ?MINI_QUERY_DIVISOR)
     end;
 profile_fun(
     {query, QuerySize},
@@ -520,6 +624,13 @@ profile_fun(
         random_queries(
             Bookie, Bucket, 10, IndexCount, QuerySize, IndexesReturned)
     end;
+profile_fun(
+    regex_query,
+    {Bookie, Bucket, _KeyCount, _ObjSize, _IndexCount, IndexesReturned}) ->
+    fun() ->
+        random_people_queries(
+            Bookie, Bucket, IndexesReturned div ?RGEX_QUERY_DIVISOR)
+    end;
 profile_fun(
     {head, HeadFetches},
     {Bookie, Bucket, KeyCount, _ObjSize, _IndexCount, _IndexesReturned}) ->
@@ -549,11 +660,230 @@ profile_fun(
 profile_fun(
     CounterFold,
     {Bookie, _Bucket, _KeyCount, _ObjSize, _IndexCount, _IndexesReturned}) ->
+    Runs =
+        case CounterFold of
+            full ->
+                20;
+            estimate ->
+                40;
+            guess ->
+                100
+        end,
     fun() ->
         lists:foreach(
             fun(_I) ->
                 _ = counter(Bookie, CounterFold)
             end,
-            lists:seq(1, 10)
+            lists:seq(1, Runs)
         )
     end.
+
+random_people_index() ->
+    io_lib:format(
+        "~s|~s|~s|#~s#~s#~s|#~s#~s#~s",
+        [get_random_surname(),
+            get_random_dob(),
+            get_random_dod(),
+            get_random_givenname(), get_random_givenname(), get_random_givenname(),
+            get_random_postcode(), get_random_postcode(), get_random_postcode()
+        ]
+    ).
+
+get_random_surname() ->
+    lists:nth(
+        rand:uniform(100),
+        [<<"Smith">>, <<"Jones">>, <<"Taylor">>, <<"Brown">>, <<"Williams">>,
+            <<"Wilson">>, <<"Johnson">>, <<"Davies">>, <<"Patel">>, <<"Robinson">>,
+            <<"Wright">>, <<"Thompson">>, <<"Evans">>, <<"Walker">>, <<"White">>,
+            <<"Roberts">>, <<"Green">>, <<"Hall">>, <<"Thomas">>, <<"Clarke">>,
+            <<"Jackson">>, <<"Wood">>, <<"Harris">>, <<"Edwards">>, <<"Turner">>,
+            <<"Martin">>, <<"Cooper">>, <<"Hill">>, <<"Ward">>, <<"Hughes">>,
+            <<"Moore">>, <<"Clark">>, <<"King">>, <<"Harrison">>, <<"Lewis">>,
+            <<"Baker">>, <<"Lee">>, <<"Allen">>, <<"Morris">>, <<"Khan">>,
+            <<"Scott">>, <<"Watson">>, <<"Davis">>, <<"Parker">>, <<"James">>,
+            <<"Bennett">>, <<"Young">>, <<"Phillips">>, <<"Richardson">>, <<"Mitchell">>,
+            <<"Bailey">>, <<"Carter">>, <<"Cook">>, <<"Singh">>, <<"Shaw">>,
+            <<"Bell">>, <<"Collins">>, <<"Morgan">>, <<"Kelly">>, <<"Begum">>,
+            <<"Miller">>, <<"Cox">>, <<"Hussain">>, <<"Marshall">>, <<"Simpson">>,
+            <<"Price">>, <<"Anderson">>, <<"Adams">>, <<"Wilkinson">>, <<"Ali">>,
+            <<"Ahmed">>, <<"Foster">>, <<"Ellis">>, <<"Murphy">>, <<"Chapman">>,
+            <<"Mason">>, <<"Gray">>, <<"Richards">>, <<"Webb">>, <<"Griffiths">>,
+            <<"Hunt">>, <<"Palmer">>, <<"Campbell">>, <<"Holmes">>, <<"Mills">>,
+            <<"Rogers">>, <<"Barnes">>, <<"Knight">>, <<"Matthews">>, <<"Barker">>,
+            <<"Powell">>, <<"Stevens">>, <<"Kaur">>, <<"Fisher">>, <<"Butler">>,
+            <<"Dixon">>, <<"Russell">>, <<"Harvey">>, <<"Pearson">>, <<"Graham">>]
+    ).
+
+get_random_givenname() ->
+    lists:nth(
+        rand:uniform(20),
+        [<<"Noah">>, <<"Oliver">>, <<"George">>, <<"Arthur">>, <<"Muhammad">>,
+            <<"Leo">>, <<"Harry">>, <<"Oscar">> , <<"Archie">>, <<"Henry">>,
+            <<"Olivia">>, <<"Amelia">>, <<"Isla">>, <<"Ava">>, <<"Ivy">>,
+            <<"Freya">>, <<"Lily">>, <<"Florence">>, <<"Mia">>, <<"Willow">>
+        ]).
+
+get_random_dob() ->
+    io_lib:format(
+        "~4..0B~2..0B~2..0B",
+        [1900 + rand:uniform(99), rand:uniform(12), rand:uniform(28)]
+    ).
+
+get_random_dod() ->
+    io_lib:format(
+        "~4..0B~2..0B~2..0B",
+        [2000 + rand:uniform(20), rand:uniform(12), rand:uniform(28)]
+    ).
+
+get_random_postcode() ->
+    io_lib:format(
+        "LS~w ~wXX", [rand:uniform(26), rand:uniform(9)]
+    ).
+
+
+memory_tracking(Phase, Timeout) ->
+    spawn(
+        fun() ->
+            memory_tracking(Phase, Timeout, {0, 0, 0}, 0)
+        end
+    ).
+
+memory_tracking(Phase, Timeout, {TAcc, PAcc, BAcc}, Loops) ->
+    receive
+        {stop, Caller} ->
+            {T, P, B} = memory_usage(),
+            TAvg = (T + TAcc) div ((Loops + 1) * 1000000),
+            PAvg = (P + PAcc) div ((Loops + 1) * 1000000),
+            BAvg = (B + BAcc) div ((Loops + 1) * 1000000),
+            print_memory_stats(Phase, TAvg, PAvg, BAvg),
+            Caller ! {TAvg, PAvg, BAvg}
+    after Timeout ->
+        {T, P, B} = memory_usage(),
+        memory_tracking(
+            Phase, Timeout, {TAcc + T, PAcc + P, BAcc + B}, Loops + 1)
+    end.
+
+
+-if(?performance == riak_ctperf).
+print_memory_stats(_Phase, _TAvg, _PAvg, _BAvg) ->
+    ok.
+-else.
+print_memory_stats(Phase, TAvg, PAvg, BAvg) ->
+    io:format(
+        user,
+        "~nFor ~w memory stats: total ~wMB process ~wMB binary ~wMB~n",
+        [Phase, TAvg, PAvg, BAvg]
+    ).
+-endif.
+
+dummy_accountant() ->
+    spawn(fun() -> receive {stop, Caller} -> Caller ! ok end end).
+
+stop_accounting(Accountant) ->
+    Accountant ! {stop, self()},
+    receive ok -> ok end.
+
+stop_tracker(Tracker) ->
+    garbage_collect(),
+    % Garbage collect the test process, before getting the memory stats
+    Tracker ! {stop, self()},
+    receive MemStats -> MemStats end.
+
+-if(?ACCOUNTING).
+
+-define(ACCT_TYPES, [scheduler, dirty_io_scheduler, dirty_cpu_scheduler, aux]).
+
+accounting(Phase, Timeout, ProfileList) ->
+    case lists:member(Phase, ProfileList) of
+        true ->
+            ZeroCounters =
+                #{
+                    emulator => 0,
+                    aux => 0,
+                    check_io => 0,
+                    gc => 0,
+                    other => 0
+                },
+            InitCounters =
+                lists:map(fun(T) -> {T, ZeroCounters} end, ?ACCT_TYPES),
+            spawn(
+                fun() ->
+                    accounting(Phase, Timeout, maps:from_list(InitCounters), 0)
+                end
+            );
+        false ->
+            dummy_accountant()
+    end.
+
+accounting(Phase, Timeout, Counters, Loops) ->
+    receive
+        {stop, Caller} ->
+            io:format(
+                user,
+                "~n~nStats for Phase ~p after loops ~p:~n",
+                [Phase, Loops]
+            ),
+            lists:foreach(
+                fun(S) ->
+                    scheduler_output(S, maps:get(S, Counters))
+                end,
+                ?ACCT_TYPES
+            ),
+            Caller ! ok
+    after Timeout ->
+        msacc:start(Timeout div 5),
+        UpdCounters =
+            lists:foldl(
+                fun(StatMap, CountersAcc) ->
+                    Type = maps:get(type, StatMap),
+                    case lists:member(Type, ?ACCT_TYPES) of
+                        true ->
+                            TypeAcc =
+                                maps:intersect_with(
+                                    fun(_K, V1, V2) -> V1 + V2 end,
+                                    maps:get(counters, StatMap),
+                                    maps:get(Type, CountersAcc)
+                                ),
+                            maps:update(Type, TypeAcc, CountersAcc);
+                        false ->
+                            CountersAcc
+                    end
+                end,
+                Counters,
+                msacc:stats()
+            ),
+        accounting(Phase, Timeout, UpdCounters, Loops + 1)
+    end.
+
+scheduler_output(Scheduler, CounterMap) ->
+    Total =
+        maps:get(emulator, CounterMap) +
+            maps:get(aux, CounterMap) +
+            maps:get(check_io, CounterMap) +
+            maps:get(gc, CounterMap) +
+            maps:get(other, CounterMap),
+    GC = maps:get(gc, CounterMap),
+    GCperc = case Total > 0 of true -> GC/Total; false -> 0.0 end,
+    io:format(
+        user,
+        "~nFor ~w:~n"
+        "emulator=~w, aux=~w, check_io=~w, gc=~w, other=~w~n"
+        "total ~w~n"
+        "percentage_gc ~.2f %~n",
+        [Scheduler,
+            maps:get(emulator, CounterMap),
+            maps:get(aux, CounterMap),
+            maps:get(check_io, CounterMap),
+            GC,
+            maps:get(other, CounterMap),
+            Total,
+            GCperc
+        ]
+    ).
+
+-else.
+
+accounting(_Phase, _Timeout, _ProfileList) ->
+    dummy_accountant().
+
+-endif.
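The accountant added above is a sampling loop over msacc (microstate accounting, from runtime_tools); maps:intersect_with/3 is what pins it to OTP 24+. A one-shot sketch of the underlying primitive, independent of the suite (helper name illustrative):

%% Sketch: sample microstate accounting for Millis ms and report the
%% fraction of normal-scheduler time spent in GC (runtime_tools assumed).
sample_gc_fraction(Millis) ->
    msacc:start(Millis),                  % collect for Millis ms, then stop
    Counters =
        [maps:get(counters, S) || S <- msacc:stats(),
            maps:get(type, S) == scheduler],
    GC = lists:sum([maps:get(gc, C) || C <- Counters]),
    Total = lists:sum([lists:sum(maps:values(C)) || C <- Counters]),
    case Total of 0 -> 0.0; _ -> GC / Total end.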
@@ -483,12 +483,14 @@ rotate_wipe_compact(Strategy1, Strategy2) ->
     {ok, Book3} = leveled_bookie:book_start(BookOptsAlt),
 
     {KSpcL2, _V2} = testutil:put_indexed_objects(Book3, "AltBucket6", 3000),
-    Q2 = fun(RT) -> {index_query,
-                        "AltBucket6",
-                        {fun testutil:foldkeysfun/3, []},
-                        {"idx1_bin", "#", "|"},
-                        {RT, undefined}}
-                    end,
+    Q2 =
+        fun(RT) ->
+            {index_query,
+                "AltBucket6",
+                {fun testutil:foldkeysfun/3, []},
+                {<<"idx1_bin">>, <<"#">>, <<"|">>},
+                {RT, undefined}}
+        end,
     {async, KFolder2A} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
     KeyList2A = lists:usort(KFolder2A()),
     true = length(KeyList2A) == 3000,
@@ -629,12 +631,14 @@ recovr_strategy(_Config) ->
                         true = VCH == VCG
                 end,
                 lists:nthtail(6400, AllSpcL)),
-    Q = fun(RT) -> {index_query,
-                        "Bucket6",
-                        {fun testutil:foldkeysfun/3, []},
-                        {"idx1_bin", "#", "|"},
-                        {RT, undefined}}
-                    end,
+    Q =
+        fun(RT) ->
+            {index_query,
+                "Bucket6",
+                {fun testutil:foldkeysfun/3, []},
+                {<<"idx1_bin">>, <<"#">>, <<"|">>},
+                {RT, undefined}}
+        end,
     {async, TFolder} = leveled_bookie:book_returnfolder(Book1, Q(true)),
     KeyTermList = TFolder(),
     {async, KFolder} = leveled_bookie:book_returnfolder(Book1, Q(false)),
@@ -660,12 +664,14 @@ recovr_strategy(_Config) ->
     KeyList2 = lists:usort(KFolder2()),
     true = length(KeyList2) == 6400,
 
-    Q2 = fun(RT) -> {index_query,
-                        "AltBucket6",
-                        {fun testutil:foldkeysfun/3, []},
-                        {"idx1_bin", "#", "|"},
-                        {RT, undefined}}
-                    end,
+    Q2 =
+        fun(RT) ->
+            {index_query,
+                "AltBucket6",
+                {fun testutil:foldkeysfun/3, []},
+                {<<"idx1_bin">>, <<"#">>, <<"|">>},
+                {RT, undefined}}
+        end,
     {async, KFolder2A} = leveled_bookie:book_returnfolder(Book2, Q2(false)),
     KeyList2A = lists:usort(KFolder2A()),
     true = length(KeyList2A) == 3000,
@@ -68,7 +68,7 @@
     compact_and_wait/1]).
 
 -define(RETURN_TERMS, {true, undefined}).
--define(SLOWOFFER_DELAY, 10).
+-define(SLOWOFFER_DELAY, 40).
 -define(V1_VERS, 1).
 -define(MAGIC, 53). % riak_kv -> riak_object
 -define(MD_VTAG, <<"X-Riak-VTag">>).
@@ -691,21 +691,24 @@ load_objects(ChunkSize, GenList, Bookie, TestObject, Generator, SubListL) ->
 
 
 get_randomindexes_generator(Count) ->
-    Generator = fun() ->
-                    lists:map(fun(X) ->
-                                    {add,
-                                        "idx" ++ integer_to_list(X) ++ "_bin",
-                                        get_randomdate() ++ get_randomname()} end,
-                                lists:seq(1, Count))
-                    end,
+    Generator =
+        fun() ->
+            lists:map(
+                fun(X) ->
+                    {add,
+                        list_to_binary("idx" ++ integer_to_list(X) ++ "_bin"),
+                        list_to_binary(get_randomdate() ++ get_randomname())}
+                end,
+                lists:seq(1, Count))
+        end,
     Generator.
 
 name_list() ->
     [{1, "Sophia"}, {2, "Emma"}, {3, "Olivia"}, {4, "Ava"},
-        {5, "Isabella"}, {6, "Mia"}, {7, "Zoe"}, {8, "Lily"},
-        {9, "Emily"}, {10, "Madelyn"}, {11, "Madison"}, {12, "Chloe"},
-        {13, "Charlotte"}, {14, "Aubrey"}, {15, "Avery"},
-        {16, "Abigail"}].
+    {5, "Isabella"}, {6, "Mia"}, {7, "Zoe"}, {8, "Lily"},
+    {9, "Emily"}, {10, "Madelyn"}, {11, "Madison"}, {12, "Chloe"},
+    {13, "Charlotte"}, {14, "Aubrey"}, {15, "Avery"},
+    {16, "Abigail"}].
 
 get_randomname() ->
     NameList = name_list(),
@@ -738,7 +741,7 @@ check_indexed_objects(Book, B, KSpecL, V) ->
             fun({K, Spc}) ->
                 {ok, O} = book_riakget(Book, B, K),
                 V = testutil:get_value(O),
-                {add, "idx1_bin", IdxVal} = lists:keyfind(add, 1, Spc),
+                {add, <<"idx1_bin">>, IdxVal} = lists:keyfind(add, 1, Spc),
                 {IdxVal, K}
             end,
             KSpecL),
@@ -749,7 +752,7 @@ check_indexed_objects(Book, B, KSpecL, V) ->
             {index_query,
                 B,
                 {fun foldkeysfun/3, []},
-                {"idx1_bin", "0", "|"},
+                {<<"idx1_bin">>, <<"0">>, <<"|">>},
                 ?RETURN_TERMS}),
     SW = os:timestamp(),
     {async, Fldr} = R,
@@ -796,11 +799,12 @@ put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i) ->
     put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V).
 
 put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V) ->
+    SW = os:timestamp(),
     IndexGen = get_randomindexes_generator(1),
 
+    ThisProcess = self(),
     FindAdditionFun = fun(SpcItem) -> element(1, SpcItem) == add end,
     MapFun =
-        fun({K, Spc}) ->
+        fun({K, Spc}, Acc) ->
             OldSpecs = lists:filter(FindAdditionFun, Spc),
             {RemoveSpc, AddSpc} =
                 case RemoveOld2i of
@@ -809,26 +813,45 @@ put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V) ->
                     false ->
                         {[], OldSpecs}
                 end,
-            {O, DeltaSpecs} =
-                set_object(Bucket, K, V,
-                            IndexGen, RemoveSpc, AddSpc),
-            % DeltaSpecs should be new indexes added, and any old indexes which
-            % have been removed by this change where RemoveOld2i is true.
-            %
-            % The actual indexes within the object should reflect any history
-            % of indexes i.e. when RemoveOld2i is false.
-            %
-            % The [{Key, SpecL}] returned should accrue additions over loops if
-            % RemoveOld2i is false
-            case book_riakput(Book, O, DeltaSpecs) of
-                ok -> ok;
-                pause -> timer:sleep(?SLOWOFFER_DELAY)
-            end,
+            PutFun =
+                fun() ->
+                    {O, DeltaSpecs} =
+                        set_object(
+                            Bucket, K, V, IndexGen, RemoveSpc, AddSpc),
+                    % DeltaSpecs should be new indexes added, and any old
+                    % indexes which have been removed by this change where
+                    % RemoveOld2i is true.
+                    %
+                    % The actual indexes within the object should reflect any
+                    % history of indexes i.e. when RemoveOld2i is false.
+                    %
+                    % The [{Key, SpecL}] returned should accrue additions over
+                    % loops if RemoveOld2i is false
+                    R =
+                        case book_riakput(Book, O, DeltaSpecs) of
+                            ok ->
+                                ok;
+                            pause ->
+                                timer:sleep(?SLOWOFFER_DELAY),
+                                pause
+                        end,
+                    ThisProcess ! {R, DeltaSpecs}
+                end,
+            spawn(PutFun),
+            AccOut =
+                receive
+                    {ok, NewSpecs} -> Acc;
+                    {pause, NewSpecs} -> Acc + 1
+                end,
             % Note that order in the SpecL is important, as
             % check_indexed_objects, needs to find the latest item added
-            {K, DeltaSpecs ++ AddSpc}
+            {{K, NewSpecs ++ AddSpc}, AccOut}
         end,
-    RplKSpecL = lists:map(MapFun, KSpecL),
+    {RplKSpecL, Pauses} = lists:mapfoldl(MapFun, 0, KSpecL),
+    io:format(
+        "Altering ~w objects took ~w ms with ~w pauses~n",
+        [length(KSpecL), timer:now_diff(os:timestamp(), SW) div 1000, Pauses]
+    ),
     {RplKSpecL, V}.
 
 rotating_object_check(RootPath, B, NumberOfObjects) ->
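The switch from lists:map to lists:mapfoldl above lets the pause count ride alongside the rebuilt [{Key, SpecL}] list. A tiny self-contained illustration of that shape (do_put/1 is a stand-in for the real PUT):

-module(mapfoldl_demo).
-export([count_while_mapping/1]).

%% Stand-in for a PUT that occasionally asks the caller to back off.
do_put(I) when I rem 5 == 0 -> pause;
do_put(_) -> ok.

%% Threads a pause counter through a map, as the reworked
%% put_altered_indexed_objects/5 does.
count_while_mapping(Items) ->
    lists:mapfoldl(
        fun(I, Pauses) ->
            case do_put(I) of
                ok -> {I, Pauses};
                pause -> {I, Pauses + 1}
            end
        end,
        0, Items).

%% > mapfoldl_demo:count_while_mapping(lists:seq(1, 10)).
%% {[1,2,3,4,5,6,7,8,9,10],2}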
@@ -378,10 +378,13 @@ index_compare(_Config) ->
     GetTicTacTreeFun =
         fun(X, Bookie) ->
             SW = os:timestamp(),
-            ST = "!",
-            ET = "|",
+            ST = <<"!">>,
+            ET = <<"|">>,
             Q = {tictactree_idx,
-                    {BucketBin, "idx" ++ integer_to_list(X) ++ "_bin", ST, ET},
+                    {BucketBin,
+                        list_to_binary("idx" ++ integer_to_list(X) ++ "_bin"),
+                        ST,
+                        ET},
                     TreeSize,
                     fun(_B, _K) -> accumulate end},
             {async, Folder} = leveled_bookie:book_returnfolder(Bookie, Q),
@@ -442,12 +445,14 @@ index_compare(_Config) ->
     true = DL2_0 == [],
     true = length(DL2_1) > 100,
 
-    IdxSpc = {add, "idx2_bin", "zz999"},
-    {TestObj, TestSpc} = testutil:generate_testobject(BucketBin,
-                                                        term_to_binary("K9.Z"),
-                                                        "Value1",
-                                                        [IdxSpc],
-                                                        [{"MDK1", "MDV1"}]),
+    IdxSpc = {add, <<"idx2_bin">>, <<"zz999">>},
+    {TestObj, TestSpc} =
+        testutil:generate_testobject(
+            BucketBin,
+            term_to_binary("K9.Z"),
+            "Value1",
+            [IdxSpc],
+            [{"MDK1", "MDV1"}]),
     ok = testutil:book_riakput(Book2C, TestObj, TestSpc),
     testutil:check_forobject(Book2C, TestObj),
 
@@ -457,25 +462,30 @@ index_compare(_Config) ->
     TicTacTree3_P3 = GetTicTacTreeFun(2, Book2D),
 
     % Merge the tree across the partitions
-    TicTacTree3_Joined = lists:foldl(fun leveled_tictac:merge_trees/2,
-                                        TicTacTree3_P1,
-                                        [TicTacTree3_P2, TicTacTree3_P3]),
+    TicTacTree3_Joined =
+        lists:foldl(
+            fun leveled_tictac:merge_trees/2,
+            TicTacTree3_P1,
+            [TicTacTree3_P2, TicTacTree3_P3]),
 
     % Find all keys index, and then just the last key
     IdxQ1 = {index_query,
                 BucketBin,
                 {fun testutil:foldkeysfun/3, []},
-                {"idx2_bin", "zz", "zz|"},
+                {<<"idx2_bin">>, <<"zz">>, <<"zz|">>},
                 {true, undefined}},
     {async, IdxFolder1} = leveled_bookie:book_returnfolder(Book2C, IdxQ1),
     true = IdxFolder1() >= 1,
 
-    DL_3to2B = leveled_tictac:find_dirtyleaves(TicTacTree2_P1,
-                                                TicTacTree3_P1),
-    DL_3to2C = leveled_tictac:find_dirtyleaves(TicTacTree2_P2,
-                                                TicTacTree3_P2),
-    DL_3to2D = leveled_tictac:find_dirtyleaves(TicTacTree2_P3,
-                                                TicTacTree3_P3),
+    DL_3to2B =
+        leveled_tictac:find_dirtyleaves(
+            TicTacTree2_P1, TicTacTree3_P1),
+    DL_3to2C =
+        leveled_tictac:find_dirtyleaves(
+            TicTacTree2_P2, TicTacTree3_P2),
+    DL_3to2D =
+        leveled_tictac:find_dirtyleaves(
+            TicTacTree2_P3, TicTacTree3_P3),
     io:format("Individual tree comparison found dirty leaves of ~w ~w ~w~n",
                 [DL_3to2B, DL_3to2C, DL_3to2D]),
 
@@ -509,7 +519,7 @@ index_compare(_Config) ->
     MismatchQ = {index_query,
                     BucketBin,
                     {FoldKeysIndexQFun, []},
-                    {"idx2_bin", "!", "|"},
+                    {<<"idx2_bin">>, <<"!">>, <<"|">>},
                     {true, undefined}},
     {async, MMFldr_2A} = leveled_bookie:book_returnfolder(Book2A, MismatchQ),
     {async, MMFldr_2B} = leveled_bookie:book_returnfolder(Book2B, MismatchQ),
@@ -531,7 +541,7 @@ index_compare(_Config) ->
     io:format("Differences between lists ~w~n", [Diffs]),
 
     % The actual difference is discovered
-    true = lists:member({"zz999", term_to_binary("K9.Z")}, Diffs),
+    true = lists:member({<<"zz999">>, term_to_binary("K9.Z")}, Diffs),
     % Without discovering too many others
     true = length(Diffs) < 20,
 