Initial ct test
Which exposed it wasn't working. If there is no segment list passed - just a modification filter, you don't need to check the position list (as checking the position list returns an empty position list, so skipping all the matching results!)
This commit is contained in:
parent
aaccd09a98
commit
19bfe48564
4 changed files with 180 additions and 22 deletions
|
@ -951,6 +951,7 @@ book_headfold(Pid, Tag, {bucket_list, BucketList}, FoldAccT, JournalCheck, SnapP
|
||||||
book_returnfolder(Pid, RunnerType);
|
book_returnfolder(Pid, RunnerType);
|
||||||
book_headfold(Pid, Tag, {range, Bucket, KeyRange}, FoldAccT, JournalCheck, SnapPreFold,
|
book_headfold(Pid, Tag, {range, Bucket, KeyRange}, FoldAccT, JournalCheck, SnapPreFold,
|
||||||
SegmentList, LastModRange, MaxObjectCount) ->
|
SegmentList, LastModRange, MaxObjectCount) ->
|
||||||
|
|
||||||
RunnerType =
|
RunnerType =
|
||||||
{foldheads_bybucket, Tag, Bucket, KeyRange, FoldAccT,
|
{foldheads_bybucket, Tag, Bucket, KeyRange, FoldAccT,
|
||||||
JournalCheck, SnapPreFold,
|
JournalCheck, SnapPreFold,
|
||||||
|
|
|
@ -1590,10 +1590,9 @@ binarysplit_mapfun(MultiSlotBin, StartPos) ->
|
||||||
%% may be intermittently removed from the result set
|
%% may be intermittently removed from the result set
|
||||||
read_slots(Handle, SlotList, {false, 0, _BlockIndexCache},
|
read_slots(Handle, SlotList, {false, 0, _BlockIndexCache},
|
||||||
_PressMethod, _IdxModDate) ->
|
_PressMethod, _IdxModDate) ->
|
||||||
% No list of segments passed or usefult Low LastModified Date
|
% No list of segments passed or useful Low LastModified Date
|
||||||
LengthList = lists:map(fun pointer_mapfun/1, SlotList),
|
% Just read slots in SlotList
|
||||||
{MultiSlotBin, StartPos} = read_length_list(Handle, LengthList),
|
read_slotlist(SlotList, Handle);
|
||||||
lists:map(binarysplit_mapfun(MultiSlotBin, StartPos), LengthList);
|
|
||||||
read_slots(Handle, SlotList, {SegList, LowLastMod, BlockIndexCache},
|
read_slots(Handle, SlotList, {SegList, LowLastMod, BlockIndexCache},
|
||||||
PressMethod, IdxModDate) ->
|
PressMethod, IdxModDate) ->
|
||||||
% List of segments passed so only {K, V} pairs matching those segments
|
% List of segments passed so only {K, V} pairs matching those segments
|
||||||
|
@ -1609,11 +1608,7 @@ read_slots(Handle, SlotList, {SegList, LowLastMod, BlockIndexCache},
|
||||||
% If there is an attempt to use the seg list query and the
|
% If there is an attempt to use the seg list query and the
|
||||||
% index block cache isn't cached for any part this may be
|
% index block cache isn't cached for any part this may be
|
||||||
% slower as each slot will be read in turn
|
% slower as each slot will be read in turn
|
||||||
LengthDetails = pointer_mapfun(Pointer),
|
Acc ++ read_slotlist([Pointer], Handle);
|
||||||
{MultiSlotBin, StartPos} =
|
|
||||||
read_length_list(Handle, [LengthDetails]),
|
|
||||||
MapFun = binarysplit_mapfun(MultiSlotBin, StartPos),
|
|
||||||
Acc ++ [MapFun(LengthDetails)];
|
|
||||||
{BlockLengths, LMD, BlockIdx} ->
|
{BlockLengths, LMD, BlockIdx} ->
|
||||||
% If there is a BlockIndex cached then we can use it to
|
% If there is a BlockIndex cached then we can use it to
|
||||||
% check to see if any of the expected segments are
|
% check to see if any of the expected segments are
|
||||||
|
@ -1628,26 +1623,42 @@ read_slots(Handle, SlotList, {SegList, LowLastMod, BlockIndexCache},
|
||||||
true ->
|
true ->
|
||||||
% The highest LMD on the slot was before the
|
% The highest LMD on the slot was before the
|
||||||
% LowLastMod date passed in the query - therefore
|
% LowLastMod date passed in the query - therefore
|
||||||
% there ar eno interetsing modifictaions in this
|
% there are no interesting modifications in this
|
||||||
% slot - it is all too old
|
% slot - it is all too old
|
||||||
Acc;
|
Acc;
|
||||||
false ->
|
false ->
|
||||||
PositionList = find_pos(BlockIdx, SegList, [], 0),
|
case SegList of
|
||||||
Acc ++
|
false ->
|
||||||
check_blocks(PositionList,
|
% Need all the slot now
|
||||||
{Handle, SP},
|
Acc ++ read_slotlist([Pointer], Handle);
|
||||||
BlockLengths,
|
_SL ->
|
||||||
byte_size(BlockIdx),
|
% Need to find just the right keys
|
||||||
false, PressMethod, IdxModDate,
|
PositionList =
|
||||||
[])
|
find_pos(BlockIdx, SegList, [], 0),
|
||||||
% Note check_blocks should return [] if
|
Acc ++
|
||||||
% PositionList is empty
|
check_blocks(PositionList,
|
||||||
|
{Handle, SP},
|
||||||
|
BlockLengths,
|
||||||
|
byte_size(BlockIdx),
|
||||||
|
false,
|
||||||
|
PressMethod,
|
||||||
|
IdxModDate,
|
||||||
|
[])
|
||||||
|
% Note check_blocks should return [] if
|
||||||
|
% PositionList is empty
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end,
|
end,
|
||||||
lists:foldl(BinMapFun, [], SlotList).
|
lists:foldl(BinMapFun, [], SlotList).
|
||||||
|
|
||||||
|
|
||||||
|
read_slotlist(SlotList, Handle) ->
|
||||||
|
LengthList = lists:map(fun pointer_mapfun/1, SlotList),
|
||||||
|
{MultiSlotBin, StartPos} = read_length_list(Handle, LengthList),
|
||||||
|
lists:map(binarysplit_mapfun(MultiSlotBin, StartPos), LengthList).
|
||||||
|
|
||||||
|
|
||||||
-spec binaryslot_reader(list(binaryslot_element()),
|
-spec binaryslot_reader(list(binaryslot_element()),
|
||||||
press_method(),
|
press_method(),
|
||||||
boolean(),
|
boolean(),
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
-include("include/leveled.hrl").
|
-include("include/leveled.hrl").
|
||||||
-export([all/0]).
|
-export([all/0]).
|
||||||
-export([
|
-export([
|
||||||
|
fetchclocks_modifiedbetween/1,
|
||||||
crossbucket_aae/1,
|
crossbucket_aae/1,
|
||||||
handoff/1,
|
handoff/1,
|
||||||
dollar_bucket_index/1,
|
dollar_bucket_index/1,
|
||||||
|
@ -10,6 +11,7 @@
|
||||||
]).
|
]).
|
||||||
|
|
||||||
all() -> [
|
all() -> [
|
||||||
|
fetchclocks_modifiedbetween,
|
||||||
crossbucket_aae,
|
crossbucket_aae,
|
||||||
handoff,
|
handoff,
|
||||||
dollar_bucket_index,
|
dollar_bucket_index,
|
||||||
|
@ -18,6 +20,147 @@ all() -> [
|
||||||
|
|
||||||
-define(MAGIC, 53). % riak_kv -> riak_object
|
-define(MAGIC, 53). % riak_kv -> riak_object
|
||||||
|
|
||||||
|
|
||||||
|
fetchclocks_modifiedbetween(_Config) ->
|
||||||
|
RootPathA = testutil:reset_filestructure("fetchClockA"),
|
||||||
|
RootPathB = testutil:reset_filestructure("fetchClockB"),
|
||||||
|
StartOpts1A = [{root_path, RootPathA},
|
||||||
|
{max_journalsize, 500000000},
|
||||||
|
{max_pencillercachesize, 16000},
|
||||||
|
{sync_strategy, testutil:sync_strategy()}],
|
||||||
|
StartOpts1B = [{root_path, RootPathB},
|
||||||
|
{max_journalsize, 500000000},
|
||||||
|
{max_pencillercachesize, 16000},
|
||||||
|
{sync_strategy, testutil:sync_strategy()}],
|
||||||
|
{ok, Bookie1A} = leveled_bookie:book_start(StartOpts1A),
|
||||||
|
{ok, Bookie1B} = leveled_bookie:book_start(StartOpts1B),
|
||||||
|
|
||||||
|
_ObjL1StartTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
ObjList1 =
|
||||||
|
testutil:generate_objects(20000,
|
||||||
|
{fixed_binary, 1}, [],
|
||||||
|
leveled_rand:rand_bytes(512),
|
||||||
|
fun() -> [] end,
|
||||||
|
<<"B0">>),
|
||||||
|
timer:sleep(1000),
|
||||||
|
_ObjL1EndTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
timer:sleep(1000),
|
||||||
|
|
||||||
|
_ObjL2StartTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
ObjList2 =
|
||||||
|
testutil:generate_objects(15000,
|
||||||
|
{fixed_binary, 20001}, [],
|
||||||
|
leveled_rand:rand_bytes(512),
|
||||||
|
fun() -> [] end,
|
||||||
|
<<"B0">>),
|
||||||
|
timer:sleep(1000),
|
||||||
|
_ObjList2EndTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
timer:sleep(1000),
|
||||||
|
|
||||||
|
ObjL3StartTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
ObjList3 =
|
||||||
|
testutil:generate_objects(35000,
|
||||||
|
{fixed_binary, 35001}, [],
|
||||||
|
leveled_rand:rand_bytes(512),
|
||||||
|
fun() -> [] end,
|
||||||
|
<<"B0">>),
|
||||||
|
timer:sleep(1000),
|
||||||
|
ObjL3EndTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
timer:sleep(1000),
|
||||||
|
|
||||||
|
_ObjL4StartTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
ObjList4 =
|
||||||
|
testutil:generate_objects(30000,
|
||||||
|
{fixed_binary, 70001}, [],
|
||||||
|
leveled_rand:rand_bytes(512),
|
||||||
|
fun() -> [] end,
|
||||||
|
"B0"),
|
||||||
|
timer:sleep(1000),
|
||||||
|
_ObjL4EndTS = testutil:convert_to_seconds(os:timestamp()),
|
||||||
|
timer:sleep(1000),
|
||||||
|
|
||||||
|
testutil:riakload(Bookie1A, ObjList1),
|
||||||
|
testutil:riakload(Bookie1A, ObjList2),
|
||||||
|
testutil:riakload(Bookie1A, ObjList3),
|
||||||
|
testutil:riakload(Bookie1A, ObjList4),
|
||||||
|
|
||||||
|
testutil:riakload(Bookie1B, ObjList1),
|
||||||
|
testutil:riakload(Bookie1B, ObjList3),
|
||||||
|
testutil:riakload(Bookie1B, ObjList4),
|
||||||
|
|
||||||
|
RevertFixedBinKey =
|
||||||
|
fun(FBK) ->
|
||||||
|
<<$K, $e, $y, KeyNumber:64/integer>> = FBK,
|
||||||
|
KeyNumber
|
||||||
|
end,
|
||||||
|
StoreFoldFun =
|
||||||
|
fun(_B, K, _V, {_LK, AccC}) ->
|
||||||
|
{RevertFixedBinKey(K), AccC + 1}
|
||||||
|
end,
|
||||||
|
|
||||||
|
KeyRangeFun =
|
||||||
|
fun(StartNumber, EndNumber) ->
|
||||||
|
{range,
|
||||||
|
<<"B0">>,
|
||||||
|
{testutil:fixed_bin_key(StartNumber),
|
||||||
|
testutil:fixed_bin_key(EndNumber)}}
|
||||||
|
end,
|
||||||
|
|
||||||
|
% Count with max object count
|
||||||
|
FoldRangesFun =
|
||||||
|
fun(FoldTarget, ModRange, EndNumber) ->
|
||||||
|
fun(_I, {LKN, KC}) ->
|
||||||
|
{async, Runner} =
|
||||||
|
leveled_bookie:book_headfold(FoldTarget,
|
||||||
|
?RIAK_TAG,
|
||||||
|
KeyRangeFun(LKN + 1,
|
||||||
|
EndNumber),
|
||||||
|
{StoreFoldFun, {LKN, KC}},
|
||||||
|
false,
|
||||||
|
true,
|
||||||
|
false,
|
||||||
|
ModRange,
|
||||||
|
13000),
|
||||||
|
{_, {LKN0, KC0}} = Runner(),
|
||||||
|
{LKN0, KC0}
|
||||||
|
end
|
||||||
|
end,
|
||||||
|
|
||||||
|
R1A = lists:foldl(FoldRangesFun(Bookie1A, false, 50000),
|
||||||
|
{0, 0}, lists:seq(1, 4)),
|
||||||
|
io:format("R1A ~w~n", [R1A]),
|
||||||
|
true = {50000, 50000} == R1A,
|
||||||
|
|
||||||
|
R1B = lists:foldl(FoldRangesFun(Bookie1B, false, 50000),
|
||||||
|
{0, 0}, lists:seq(1, 3)),
|
||||||
|
io:format("R1B ~w~n", [R1B]),
|
||||||
|
true = {50000, 35000} == R1B,
|
||||||
|
|
||||||
|
R2A = lists:foldl(FoldRangesFun(Bookie1A,
|
||||||
|
{ObjL3StartTS, ObjL3EndTS},
|
||||||
|
60000),
|
||||||
|
{10000, 0}, lists:seq(1, 2)),
|
||||||
|
io:format("R2A ~w~n", [R2A]),
|
||||||
|
true = {60000, 25000} == R2A,
|
||||||
|
R2A_SR = lists:foldl(FoldRangesFun(Bookie1A,
|
||||||
|
{ObjL3StartTS, ObjL3EndTS},
|
||||||
|
60000),
|
||||||
|
{10000, 0}, lists:seq(1, 1)),
|
||||||
|
io:format("R2A_SingleRotation ~w~n", [R2A]),
|
||||||
|
true = {48000, 13000} == R2A_SR, % Hit at max results
|
||||||
|
R2B = lists:foldl(FoldRangesFun(Bookie1B,
|
||||||
|
{ObjL3StartTS, ObjL3EndTS},
|
||||||
|
60000),
|
||||||
|
{10000, 0}, lists:seq(1, 2)),
|
||||||
|
io:format("R2B ~w~n", [R1B]),
|
||||||
|
true = {60000, 25000} == R2B,
|
||||||
|
|
||||||
|
|
||||||
|
ok = leveled_bookie:book_destroy(Bookie1A),
|
||||||
|
ok = leveled_bookie:book_destroy(Bookie1B).
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
crossbucket_aae(_Config) ->
|
crossbucket_aae(_Config) ->
|
||||||
% Test requires multiple different databases, so want to mount them all
|
% Test requires multiple different databases, so want to mount them all
|
||||||
% on individual file paths
|
% on individual file paths
|
||||||
|
|
|
@ -51,8 +51,9 @@
|
||||||
sync_strategy/0,
|
sync_strategy/0,
|
||||||
riak_object/4,
|
riak_object/4,
|
||||||
get_value_from_objectlistitem/1,
|
get_value_from_objectlistitem/1,
|
||||||
numbered_key/1,
|
numbered_key/1,
|
||||||
fixed_bin_key/1]).
|
fixed_bin_key/1,
|
||||||
|
convert_to_seconds/1]).
|
||||||
|
|
||||||
-define(RETURN_TERMS, {true, undefined}).
|
-define(RETURN_TERMS, {true, undefined}).
|
||||||
-define(SLOWOFFER_DELAY, 5).
|
-define(SLOWOFFER_DELAY, 5).
|
||||||
|
@ -771,3 +772,5 @@ find_journals(RootPath) ->
|
||||||
FNsA_J),
|
FNsA_J),
|
||||||
CDBFiles.
|
CDBFiles.
|
||||||
|
|
||||||
|
convert_to_seconds({MegaSec, Seconds, _MicroSec}) ->
|
||||||
|
MegaSec * 1000000 + Seconds.
|
Loading…
Add table
Add a link
Reference in a new issue