Allow all-keys head folds to use a modified range
This helps kv_index_tictactree with the leveled_so backend. It can now run folds over ranges of keys with last-modified filters (since folds over ranges of keys must cover all keys when the backend is segment_ordered).
parent aa123a80a7
commit 71fa1447e0
5 changed files with 41 additions and 33 deletions
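As an illustration of the widened API (not part of this change set; the function name, fold fun, and range values below are hypothetical), a caller could now fold over the heads of all keys while filtering on a last-modified range:

%% Sketch only: fold over the heads of every object under Tag, restricted to
%% objects whose last-modified date falls inside {LowSec, HighSec} (assumed to
%% be a leveled_codec:lastmod_range() pair).  Bookie is an already-started
%% leveled_bookie pid; passing false for SegmentList and MaxObjectCount leaves
%% those filters disabled.
heads_modified_between(Bookie, Tag, LowSec, HighSec) ->
    FoldFun = fun(Bucket, Key, _ProxyObj, Acc) -> [{Bucket, Key}|Acc] end,
    {async, Runner} =
        leveled_bookie:book_headfold(Bookie, Tag,
                                        all,                % new 'all' Limiter
                                        {FoldFun, []},      % FoldAccT
                                        false,              % JournalCheck
                                        true,               % SnapPreFold
                                        false,              % SegmentList
                                        {LowSec, HighSec},  % LastModRange
                                        false),             % MaxObjectCount
    Runner().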
@@ -864,8 +864,9 @@ book_objectfold(Pid, Tag, Bucket, Limiter, FoldAccT, SnapPreFold) ->
         SegmentList :: false | list(integer()),
         Runner :: fun(() -> Acc).
 book_headfold(Pid, Tag, FoldAccT, JournalCheck, SnapPreFold, SegmentList) ->
-    RunnerType = {foldheads_allkeys, Tag, FoldAccT, JournalCheck, SnapPreFold, SegmentList},
-    book_returnfolder(Pid, RunnerType).
+    book_headfold(Pid, Tag, all,
+                    FoldAccT, JournalCheck, SnapPreFold,
+                    SegmentList, false, false).
 
 %% @doc as book_headfold/6, but with the addition of a `Limiter' that
 %% restricts the set of objects folded over. `Limiter' can either be a
@@ -897,16 +898,10 @@ book_headfold(Pid, Tag, FoldAccT, JournalCheck, SnapPreFold, SegmentList) ->
         SnapPreFold :: boolean(),
         SegmentList :: false | list(integer()),
         Runner :: fun(() -> Acc).
-book_headfold(Pid, Tag, {bucket_list, BucketList}, FoldAccT, JournalCheck, SnapPreFold, SegmentList) ->
-    RunnerType =
-        {foldheads_bybucket, Tag, BucketList, bucket_list, FoldAccT,
-            JournalCheck, SnapPreFold, SegmentList, false, false},
-    book_returnfolder(Pid, RunnerType);
-book_headfold(Pid, Tag, {range, Bucket, KeyRange}, FoldAccT, JournalCheck, SnapPreFold, SegmentList) ->
-    RunnerType =
-        {foldheads_bybucket, Tag, Bucket, KeyRange, FoldAccT,
-            JournalCheck, SnapPreFold, SegmentList, false, false},
-    book_returnfolder(Pid, RunnerType).
+book_headfold(Pid, Tag, Limiter, FoldAccT, JournalCheck, SnapPreFold, SegmentList) ->
+    book_headfold(Pid, Tag, Limiter,
+                    FoldAccT, JournalCheck, SnapPreFold,
+                    SegmentList, false, false).
 
 %% @doc as book_headfold/7, but with the addition of a Last Modified Date
 %% Range and Max Object Count. For version 2 objects this will filter out
@@ -927,7 +922,7 @@ book_headfold(Pid, Tag, {range, Bucket, KeyRange}, FoldAccT, JournalCheck, SnapP
                     SegmentList, LastModRange, MaxObjectCount) ->
                         {async, Runner} when
         Tag :: leveled_codec:tag(),
-        Limiter :: BucketList | BucketKeyRange,
+        Limiter :: BucketList | BucketKeyRange | all,
         BucketList :: {bucket_list, list(Bucket)},
         BucketKeyRange :: {range, Bucket, KeyRange},
         KeyRange :: {StartKey, EndKey} | all,
@@ -953,12 +948,17 @@ book_headfold(Pid, Tag, {bucket_list, BucketList}, FoldAccT, JournalCheck, SnapP
                     SegmentList, LastModRange, MaxObjectCount},
     book_returnfolder(Pid, RunnerType);
 book_headfold(Pid, Tag, {range, Bucket, KeyRange}, FoldAccT, JournalCheck, SnapPreFold,
                 SegmentList, LastModRange, MaxObjectCount) ->
 
     RunnerType =
         {foldheads_bybucket, Tag, Bucket, KeyRange, FoldAccT,
             JournalCheck, SnapPreFold,
             SegmentList, LastModRange, MaxObjectCount},
+    book_returnfolder(Pid, RunnerType);
+book_headfold(Pid, Tag, all, FoldAccT, JournalCheck, SnapPreFold,
+                SegmentList, LastModRange, MaxObjectCount) ->
+    RunnerType = {foldheads_allkeys, Tag, FoldAccT,
+                    JournalCheck, SnapPreFold,
+                    SegmentList, LastModRange, MaxObjectCount},
     book_returnfolder(Pid, RunnerType).
 
 -spec book_snapshot(pid(),
@@ -1615,12 +1615,14 @@ get_runner(State, {keylist, Tag, Bucket, KeyRange, FoldAccT, TermRegex}) ->
 get_runner(State,
             {foldheads_allkeys,
                 Tag, FoldFun,
-                JournalCheck, SnapPreFold, SegmentList}) ->
+                JournalCheck, SnapPreFold, SegmentList,
+                LastModRange, MaxObjectCount}) ->
     SnapType = snaptype_by_presence(JournalCheck),
     SnapFun = return_snapfun(State, SnapType, no_lookup, true, SnapPreFold),
     leveled_runner:foldheads_allkeys(SnapFun,
                                         Tag, FoldFun,
-                                        JournalCheck, SegmentList);
+                                        JournalCheck, SegmentList,
+                                        LastModRange, MaxObjectCount);
 get_runner(State,
             {foldobjects_allkeys, Tag, FoldFun, SnapPreFold}) ->
     get_runner(State,
@@ -2494,7 +2496,7 @@ foldobjects_vs_hashtree_testto() ->
                             {foldheads_allkeys,
                                 ?STD_TAG,
                                 FoldHeadsFun,
-                                true, true, false}),
+                                true, true, false, false, false}),
     KeyHashList3 = HTFolder3(),
     ?assertMatch(KeyHashList1, lists:usort(KeyHashList3)),
 
@@ -2513,7 +2515,7 @@ foldobjects_vs_hashtree_testto() ->
                             {foldheads_allkeys,
                                 ?STD_TAG,
                                 FoldHeadsFun2,
-                                false, false, false}),
+                                false, false, false, false, false}),
     KeyHashList4 = HTFolder4(),
     ?assertMatch(KeyHashList1, lists:usort(KeyHashList4)),
 
@@ -30,7 +30,7 @@
          bucketkey_query/6,
          hashlist_query/3,
          tictactree/5,
-         foldheads_allkeys/5,
+         foldheads_allkeys/7,
          foldobjects_allkeys/4,
          foldheads_bybucket/8,
          foldobjects_bybucket/4,
@@ -270,12 +270,14 @@ tictactree(SnapFun, {Tag, Bucket, Query}, JournalCheck, TreeSize, Filter) ->
     {async, Runner}.
 
 -spec foldheads_allkeys(fun(), leveled_codec:tag(),
-                        fun(), boolean(), false|list(integer()))
-                            -> {async, fun()}.
+                        fun(), boolean(), false|list(integer()),
+                        false|leveled_codec:lastmod_range(),
+                        false|pos_integer()) -> {async, fun()}.
 %% @doc
 %% Fold over all heads in the store for a given tag - applying the passed
 %% function to each proxy object
-foldheads_allkeys(SnapFun, Tag, FoldFun, JournalCheck, SegmentList) ->
+foldheads_allkeys(SnapFun, Tag, FoldFun, JournalCheck,
+                    SegmentList, LastModRange, MaxObjectCount) ->
     StartKey = leveled_codec:to_ledgerkey(null, null, Tag),
     EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
     foldobjects(SnapFun,
@@ -283,7 +285,9 @@ foldheads_allkeys(SnapFun, Tag, FoldFun, JournalCheck, SegmentList) ->
                 [{StartKey, EndKey}],
                 FoldFun,
                 {true, JournalCheck},
-                SegmentList).
+                SegmentList,
+                LastModRange,
+                MaxObjectCount).
 
 -spec foldobjects_allkeys(fun(), leveled_codec:tag(), fun(),
                             key_order|sqn_order) -> {async, fun()}.
@@ -227,7 +227,8 @@ aae_missingjournal(_Config) ->
                             {foldheads_allkeys,
                                 ?RIAK_TAG,
                                 FoldHeadsFun,
-                                true, true, false}),
+                                true, true, false,
+                                false, false}),
     HeadL2 = length(AllHeadF2()),
     io:format("Fold head returned ~w objects~n", [HeadL2]),
     true = HeadL2 < HeadL1,
@@ -422,7 +422,7 @@ test_segfilter_query(Bookie, CLs) ->
                         Acc
                     end
                 end, 0},
-            false, true, SegL}
+            false, true, SegL, false, false}
         end,
 
     {async, SL1Folder} =
@@ -455,7 +455,7 @@ test_singledelta_stores(BookA, BookB, TreeSize, DeltaKey) ->
            ?RIAK_TAG,
            {fun head_tictac_foldfun/4,
             {0, leveled_tictac:new_tree(test, TreeSize)}},
-           false, true, false},
+           false, true, false, false, false},
     % tictac query by bucket (should be same result as all stores)
     TicTacByBucketFolder =
         {foldheads_bybucket,
@@ -478,7 +478,7 @@ test_singledelta_stores(BookA, BookB, TreeSize, DeltaKey) ->
         {foldheads_allkeys,
            ?RIAK_TAG,
            {get_segment_folder(DLs, TreeSize), []},
-           false, true, false},
+           false, true, false, false, false},
 
     SW_SL0 = os:timestamp(),
     {async, BookASegFolder} =
@@ -502,7 +502,7 @@ test_singledelta_stores(BookA, BookB, TreeSize, DeltaKey) ->
         {foldheads_allkeys,
            ?RIAK_TAG,
            {get_segment_folder(DLs, TreeSize), []},
-           false, true, SegFilterList},
+           false, true, SegFilterList, false, false},
 
     SW_SL1 = os:timestamp(),
     {async, BookASegFolder1} =
@@ -521,7 +521,7 @@ test_singledelta_stores(BookA, BookB, TreeSize, DeltaKey) ->
         {foldheads_allkeys,
            ?RIAK_TAG,
            {get_segment_folder(DLs, TreeSize), []},
-           true, true, SegFilterList},
+           true, true, SegFilterList, false, false},
 
     SW_SL1CP = os:timestamp(),
     {async, BookASegFolder1CP} =
@@ -545,7 +545,7 @@ test_singledelta_stores(BookA, BookB, TreeSize, DeltaKey) ->
         {foldheads_allkeys,
            ?RIAK_TAG,
            {get_segment_folder(DLs, TreeSize), []},
-           false, true, SegFilterListF},
+           false, true, SegFilterListF, false, false},
 
     SW_SL1F = os:timestamp(),
     {async, BookASegFolder1F} =
@@ -749,7 +749,7 @@ handoff(_Config) ->
            ?RIAK_TAG,
            {fun head_tictac_foldfun/4,
             {0, leveled_tictac:new_tree(test, TreeSize)}},
-           false, true, false},
+           false, true, false, false, false},
     check_tictacfold(Bookie1, Bookie2, TicTacFolder, none, TreeSize),
     check_tictacfold(Bookie2, Bookie3, TicTacFolder, none, TreeSize),
     check_tictacfold(Bookie3, Bookie4, TicTacFolder, none, TreeSize),
@@ -595,7 +595,8 @@ basic_headonly_test(ObjectCount, RemoveCount, HeadOnly) ->
     InitAcc = {0, 0},
 
     RunnerDefinition =
-        {foldheads_allkeys, h, {FoldFun, InitAcc}, false, false, false},
+        {foldheads_allkeys, h, {FoldFun, InitAcc},
+            false, false, false, false, false},
     {async, Runner1} =
         leveled_bookie:book_returnfolder(Bookie1, RunnerDefinition),
 
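As the test updates above illustrate, any code that builds the foldheads_allkeys runner tuple directly must now carry the two extra fields. A minimal sketch, with Bookie and FoldHeadsFun as placeholder names; passing false for both new fields keeps the previous behaviour:

%% Old shape: {foldheads_allkeys, Tag, FoldAccT, JournalCheck, SnapPreFold, SegmentList}
%% New shape appends LastModRange and MaxObjectCount (here both false).
RunnerType = {foldheads_allkeys, ?RIAK_TAG, {FoldHeadsFun, []},
                false, true, false,
                false, false},
{async, Folder} = leveled_bookie:book_returnfolder(Bookie, RunnerType),
Results = Folder().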