Segment filter and multiple keys in slot
An issue was spotted. If we use a segment filter in a query, and there are multiple matches within a given slot - only the first match is returned. Tests didn't detect this. Now they do, and the issue is resolved.
This commit is contained in:
parent
cbf6e26fc8
commit
18aabb49ba
2 changed files with 67 additions and 35 deletions
|
@ -369,7 +369,6 @@ sst_getkvrange(Pid, StartKey, EndKey, ScanWidth) ->
|
|||
%% leveled_tictac
|
||||
sst_getfilteredrange(Pid, StartKey, EndKey, ScanWidth, SegList) ->
|
||||
SegList0 = tune_seglist(SegList),
|
||||
io:format("Using tuned seglist ~w~n", [SegList0]),
|
||||
case gen_fsm:sync_send_event(Pid,
|
||||
{get_kvrange,
|
||||
StartKey, EndKey,
|
||||
|
@ -1260,7 +1259,12 @@ generate_binary_slot(Lookup, KVL, PressMethod, BuildTimings0) ->
|
|||
|
||||
check_blocks([], _Handle, _StartPos, _BlockLengths, _PosBinLength,
|
||||
_LedgerKeyToCheck, _PressMethod, Acc) ->
|
||||
Acc;
|
||||
case is_list(Acc) of
|
||||
true ->
|
||||
lists:reverse(Acc);
|
||||
false ->
|
||||
Acc
|
||||
end;
|
||||
check_blocks([Pos|Rest], Handle, StartPos, BlockLengths, PosBinLength,
|
||||
LedgerKeyToCheck, PressMethod, Acc) ->
|
||||
{BlockNumber, BlockPos} = revert_position(Pos),
|
||||
|
@ -1278,8 +1282,10 @@ check_blocks([Pos|Rest], Handle, StartPos, BlockLengths, PosBinLength,
|
|||
_ ->
|
||||
case LedgerKeyToCheck of
|
||||
false ->
|
||||
io:format("{K, V} found in block of ~w~n", [{K, V}]),
|
||||
Acc ++ [{K, V}];
|
||||
check_blocks(Rest, Handle, StartPos,
|
||||
BlockLengths, PosBinLength,
|
||||
LedgerKeyToCheck, PressMethod,
|
||||
[{K, V}|Acc]);
|
||||
_ ->
|
||||
check_blocks(Rest, Handle, StartPos,
|
||||
BlockLengths, PosBinLength,
|
||||
|
@ -1382,11 +1388,8 @@ read_slots(Handle, SlotList, {SegList, BlockIndexCache}, PressMethod) ->
|
|||
% other keys
|
||||
case find_pos(BlockIdx, SegList, [], 0) of
|
||||
[] ->
|
||||
io:format("Empty postion list for slot~n"),
|
||||
Acc;
|
||||
PositionList ->
|
||||
io:format("~w positions found for slot~n",
|
||||
[length(PositionList)]),
|
||||
Acc ++
|
||||
check_blocks(PositionList,
|
||||
Handle, SP,
|
||||
|
@ -1686,7 +1689,6 @@ find_pos(<<1:1/integer, PotentialHit:15/integer, T/binary>>,
|
|||
HashList, PosList, Count) when is_list(HashList) ->
|
||||
case lists:member(PotentialHit, HashList) of
|
||||
true ->
|
||||
io:format("Found pos based on ~w~n", [PotentialHit]),
|
||||
find_pos(T, HashList, PosList ++ [Count], Count + 1);
|
||||
false ->
|
||||
find_pos(T, HashList, PosList, Count + 1)
|
||||
|
|
|
@ -61,25 +61,11 @@ crossbucket_aae(_Config) ->
|
|||
%% Check all the objects are found - used to trigger HEAD performance log
|
||||
ok = testutil:checkhead_forlist(Bookie2, lists:nth(1, CLs)),
|
||||
|
||||
% Start a new store, and load the same objects (except for the original
|
||||
% test object) into this store
|
||||
|
||||
StartOpts3 = [{root_path, RootPathB},
|
||||
{max_journalsize, 200000000},
|
||||
{max_pencillercachesize, 16000},
|
||||
{sync_strategy, testutil:sync_strategy()}],
|
||||
{ok, Bookie3} = leveled_bookie:book_start(StartOpts3),
|
||||
lists:foreach(fun(ObjL) -> testutil:riakload(Bookie3, ObjL) end, CLs),
|
||||
test_singledelta_stores(Bookie2, Bookie3, small, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, medium, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, xsmall, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, xxsmall, {B1, K1}),
|
||||
|
||||
% Test with a newly opened book (i.e. with no block indexes cached)
|
||||
ok = leveled_bookie:book_close(Bookie2),
|
||||
{ok, Bookie2A} = leveled_bookie:book_start(StartOpts2),
|
||||
test_singledelta_stores(Bookie2A, Bookie3, small, {B1, K1}),
|
||||
|
||||
% This part of the test tests an issue with accelerating folds by segment
|
||||
% list, when there is more than one key with a matching segment in the
|
||||
% slot. Previously this was not handled correctly - and this part
|
||||
% of the test detects this, by finding slices of keys which are probably
|
||||
% in the same slot
|
||||
SW0 = os:timestamp(),
|
||||
SliceSize = 20,
|
||||
|
||||
|
@ -94,35 +80,78 @@ crossbucket_aae(_Config) ->
|
|||
K = RiakObject#r_object.key,
|
||||
leveled_tictac:keyto_segment32(<<B/binary, K/binary>>)
|
||||
end,
|
||||
BKMapFun =
|
||||
fun({_RN, RiakObject, _Spc}) ->
|
||||
B = RiakObject#r_object.bucket,
|
||||
K = RiakObject#r_object.key,
|
||||
{B, K}
|
||||
end,
|
||||
|
||||
SL1 = lists:map(SegMapFun, CL1),
|
||||
SL2 = lists:map(SegMapFun, CL2),
|
||||
SL3 = lists:map(SegMapFun, CL3),
|
||||
SL4 = lists:map(SegMapFun, CL4),
|
||||
|
||||
BK1 = lists:map(BKMapFun, CL1),
|
||||
BK2 = lists:map(BKMapFun, CL2),
|
||||
BK3 = lists:map(BKMapFun, CL3),
|
||||
BK4 = lists:map(BKMapFun, CL4),
|
||||
|
||||
HeadSegmentFolderGen =
|
||||
fun(SegL) ->
|
||||
fun(SegL, BKL) ->
|
||||
{foldheads_allkeys,
|
||||
?RIAK_TAG,
|
||||
{fun(_B, _K, _PO, Acc) -> Acc + 1 end, 0},
|
||||
{fun(B, K, _PO, Acc) ->
|
||||
case lists:member({B, K}, BKL) of
|
||||
true ->
|
||||
Acc + 1;
|
||||
false ->
|
||||
Acc
|
||||
end
|
||||
end, 0},
|
||||
false, true, SegL}
|
||||
end,
|
||||
|
||||
{async, SL1Folder} =
|
||||
leveled_bookie:book_returnfolder(Bookie3, HeadSegmentFolderGen(SL1)),
|
||||
leveled_bookie:book_returnfolder(Bookie2,
|
||||
HeadSegmentFolderGen(SL1, BK1)),
|
||||
{async, SL2Folder} =
|
||||
leveled_bookie:book_returnfolder(Bookie3, HeadSegmentFolderGen(SL2)),
|
||||
leveled_bookie:book_returnfolder(Bookie2,
|
||||
HeadSegmentFolderGen(SL2, BK2)),
|
||||
{async, SL3Folder} =
|
||||
leveled_bookie:book_returnfolder(Bookie3, HeadSegmentFolderGen(SL3)),
|
||||
leveled_bookie:book_returnfolder(Bookie2,
|
||||
HeadSegmentFolderGen(SL3, BK3)),
|
||||
{async, SL4Folder} =
|
||||
leveled_bookie:book_returnfolder(Bookie3, HeadSegmentFolderGen(SL4)),
|
||||
leveled_bookie:book_returnfolder(Bookie2,
|
||||
HeadSegmentFolderGen(SL4, BK4)),
|
||||
|
||||
Results = [SL1Folder(), SL2Folder(), SL3Folder(), SL4Folder()],
|
||||
lists:foreach(fun(R) -> true = R >= SliceSize end, Results),
|
||||
|
||||
io:format("SegList folders returned results of ~w " ++
|
||||
"for SliceSize ~w in ~w ms~n",
|
||||
[Results, SliceSize,
|
||||
timer:now_diff(os:timestamp(), SW0)/1000]),
|
||||
lists:foreach(fun(R) -> true = R == SliceSize end, Results),
|
||||
|
||||
% Start a new store, and load the same objects (except for the original
|
||||
% test object) into this store
|
||||
%
|
||||
% This is now the comparison part of the test
|
||||
|
||||
StartOpts3 = [{root_path, RootPathB},
|
||||
{max_journalsize, 200000000},
|
||||
{max_pencillercachesize, 16000},
|
||||
{sync_strategy, testutil:sync_strategy()}],
|
||||
{ok, Bookie3} = leveled_bookie:book_start(StartOpts3),
|
||||
lists:foreach(fun(ObjL) -> testutil:riakload(Bookie3, ObjL) end, CLs),
|
||||
test_singledelta_stores(Bookie2, Bookie3, small, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, medium, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, xsmall, {B1, K1}),
|
||||
test_singledelta_stores(Bookie2, Bookie3, xxsmall, {B1, K1}),
|
||||
|
||||
% Test with a newly opened book (i.e. with no block indexes cached)
|
||||
ok = leveled_bookie:book_close(Bookie2),
|
||||
{ok, Bookie2A} = leveled_bookie:book_start(StartOpts2),
|
||||
test_singledelta_stores(Bookie2A, Bookie3, small, {B1, K1}),
|
||||
|
||||
ok = leveled_bookie:book_close(Bookie2A),
|
||||
ok = leveled_bookie:book_close(Bookie3).
|
||||
|
@ -551,3 +580,4 @@ dollar_bucket_index(_Config) ->
|
|||
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
testutil:reset_filestructure().
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue