Merge pull request #68 from martinsumner/mas-isempty-i67

Mas isempty i67

commit 5c5fa8ee8d
3 changed files with 100 additions and 30 deletions
@@ -179,7 +179,7 @@ book_start(Opts) ->
 %% @doc Put an object with an expiry time
 %%
 %% Put an item in the store but with a Time To Live - the time when the object
-%% should expire, in gregorian_sconds (add the required number of seconds to
+%% should expire, in gregorian_seconds (add the required number of seconds to
 %% leveled_codec:integer_time/1).
 %%
 %% There exists the possibility of per object expiry times, not just whole
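
The @doc above describes the time-to-live put. As a usage sketch only (assuming a book_tempput/7-style export, which this comment documents, and that leveled_codec:integer_now/0 returns the current time in gregorian seconds, as the get_nextbucket/5 change below uses it), an object expiring in one hour could be written as:

    %% Sketch: TTL is an absolute expiry time in gregorian seconds.
    TTL = leveled_codec:integer_now() + 3600,
    ok = leveled_bookie:book_tempput(Bookie, Bucket, Key, Object,
                                        IndexSpecs, Tag, TTL).
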
@@ -729,6 +729,7 @@ binary_bucketlist(State, Tag, {FoldBucketsFun, InitAcc}) ->
                                             no_lookup),
     Folder = fun() ->
                 BucketAcc = get_nextbucket(null,
+                                            null,
                                             Tag,
                                             LedgerSnapshot,
                                             []),
@@ -739,26 +740,40 @@ binary_bucketlist(State, Tag, {FoldBucketsFun, InitAcc}) ->
                 end,
     {async, Folder}.

-get_nextbucket(NextBucket, Tag, LedgerSnapshot, BKList) ->
-    StartKey = leveled_codec:to_ledgerkey(NextBucket, null, Tag),
+get_nextbucket(NextBucket, NextKey, Tag, LedgerSnapshot, BKList) ->
+    Now = leveled_codec:integer_now(),
+    StartKey = leveled_codec:to_ledgerkey(NextBucket, NextKey, Tag),
     EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
-    ExtractFun = fun(LK, _V, _Acc) -> leveled_codec:from_ledgerkey(LK) end,
-    BK = leveled_penciller:pcl_fetchnextkey(LedgerSnapshot,
-                                            StartKey,
-                                            EndKey,
-                                            ExtractFun,
-                                            null),
-    case BK of
+    ExtractFun =
+        fun(LK, V, _Acc) ->
+            {leveled_codec:from_ledgerkey(LK), V}
+        end,
+    R = leveled_penciller:pcl_fetchnextkey(LedgerSnapshot,
+                                            StartKey,
+                                            EndKey,
+                                            ExtractFun,
+                                            null),
+    case R of
         null ->
             leveled_log:log("B0008",[]),
             BKList;
-        {B, K} when is_binary(B) ->
-            leveled_log:log("B0009",[B]),
-            get_nextbucket(<<B/binary, 0>>,
-                            Tag,
-                            LedgerSnapshot,
-                            [{B, K}|BKList]);
-        NB ->
+        {{B, K}, V} when is_binary(B), is_binary(K) ->
+            case leveled_codec:is_active({B, K}, V, Now) of
+                true ->
+                    leveled_log:log("B0009",[B]),
+                    get_nextbucket(<<B/binary, 0>>,
+                                    null,
+                                    Tag,
+                                    LedgerSnapshot,
+                                    [{B, K}|BKList]);
+                false ->
+                    get_nextbucket(B,
+                                    <<K/binary, 0>>,
+                                    Tag,
+                                    LedgerSnapshot,
+                                    BKList)
+            end;
+        {NB, _V} ->
             leveled_log:log("B0010",[NB]),
             []
     end.
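
The reworked get_nextbucket/5 carries a key cursor as well as a bucket cursor: if the first key found in a bucket fails leveled_codec:is_active/3 (deleted, or past its expiry relative to Now), the fold advances to the next key in the same bucket rather than reporting the bucket as present. Both recursive calls form the cursor the same way: appending a zero byte yields the immediate successor in binary sort order, so the next pcl_fetchnextkey/5 scan resumes just past the key already seen. A shell check (illustration only, not part of the diff):

    1> lists:sort([<<"ab">>, <<"a">>, <<"a", 0>>]).
    [<<"a">>,<<97,0>>,<<"ab">>]
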
@@ -1308,11 +1323,11 @@ maybe_withjitter(CacheSize, MaxCacheSize) ->



-load_fun(KeyInLedger, ValueInLedger, _Position, Acc0, ExtractFun) ->
+load_fun(KeyInJournal, ValueInJournal, _Position, Acc0, ExtractFun) ->
     {MinSQN, MaxSQN, OutputTree} = Acc0,
-    {SQN, Type, PK} = KeyInLedger,
+    {SQN, Type, PK} = KeyInJournal,
     % VBin may already be a term
-    {VBin, VSize} = ExtractFun(ValueInLedger),
+    {VBin, VSize} = ExtractFun(ValueInJournal),
     {Obj, IndexSpecs} = leveled_codec:split_inkvalue(VBin),
     case SQN of
         SQN when SQN < MinSQN ->

@@ -8,7 +8,8 @@
             fetchput_snapshot/1,
             load_and_count/1,
             load_and_count_withdelete/1,
-            space_clear_ondelete/1
+            space_clear_ondelete/1,
+            is_empty_test/1
             ]).

 all() -> [
@@ -18,7 +19,8 @@ all() -> [
             fetchput_snapshot,
             load_and_count,
             load_and_count_withdelete,
-            space_clear_ondelete
+            space_clear_ondelete,
+            is_empty_test
             ].


@@ -591,4 +593,58 @@ space_clear_ondelete(_Config) ->
     true = length(FNsD_L) < length(FNsA_L),
     true = length(FNsD_L) < length(FNsB_L),
     true = length(FNsD_L) < length(FNsC_L),
-    true = length(FNsD_L) == 0.
+    true = length(FNsD_L) == 0.
+
+
+
+is_empty_test(_Config) ->
+    RootPath = testutil:reset_filestructure(),
+    StartOpts1 = [{root_path, RootPath},
+                    {sync_strategy, testutil:sync_strategy()}],
+    {ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
+
+    {B1, K1, V1, Spec, MD} = {term_to_binary("Bucket1"),
+                                term_to_binary("Key1"),
+                                "Value1",
+                                [],
+                                [{"MDK1", "MDV1"}]},
+    {TestObject1, TestSpec1} =
+        testutil:generate_testobject(B1, K1, V1, Spec, MD),
+    {B1, K2, V2, Spec, MD} = {term_to_binary("Bucket1"),
+                                term_to_binary("Key2"),
+                                "Value2",
+                                [],
+                                [{"MDK1", "MDV1"}]},
+    {TestObject2, TestSpec2} =
+        testutil:generate_testobject(B1, K2, V2, Spec, MD),
+    {B2, K3, V3, Spec, MD} = {term_to_binary("Bucket2"),
+                                term_to_binary("Key3"),
+                                "Value3",
+                                [],
+                                [{"MDK1", "MDV1"}]},
+    {TestObject3, TestSpec3} =
+        testutil:generate_testobject(B2, K3, V3, Spec, MD),
+    ok = testutil:book_riakput(Bookie1, TestObject1, TestSpec1),
+    ok = testutil:book_riakput(Bookie1, TestObject2, TestSpec2),
+    ok = testutil:book_riakput(Bookie1, TestObject3, TestSpec3),
+
+    FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
+    BucketListQuery = {binary_bucketlist,
+                        ?RIAK_TAG,
+                        {FoldBucketsFun, sets:new()}},
+    {async, BL} = leveled_bookie:book_returnfolder(Bookie1, BucketListQuery),
+    true = sets:size(BL()) == 2,
+
+    ok = leveled_bookie:book_put(Bookie1, B2, K3, delete, [], ?RIAK_TAG),
+    {async, BLpd1} = leveled_bookie:book_returnfolder(Bookie1, BucketListQuery),
+    true = sets:size(BLpd1()) == 1,
+
+    ok = leveled_bookie:book_put(Bookie1, B1, K2, delete, [], ?RIAK_TAG),
+    {async, BLpd2} = leveled_bookie:book_returnfolder(Bookie1, BucketListQuery),
+    true = sets:size(BLpd2()) == 1,
+
+    ok = leveled_bookie:book_put(Bookie1, B1, K1, delete, [], ?RIAK_TAG),
+    {async, BLpd3} = leveled_bookie:book_returnfolder(Bookie1, BucketListQuery),
+    true = sets:size(BLpd3()) == 0,
+
+    ok = leveled_bookie:book_close(Bookie1).
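
The new test checks emptiness through the binary_bucketlist fold: the store is treated as empty for a tag once no bucket remains listable. A caller-side helper equivalent to the test's assertions might look like this sketch (the is_empty/2 name is hypothetical; the query tuple and calls are exactly those used above):

    %% Hypothetical helper mirroring the assertions in is_empty_test/1.
    is_empty(Bookie, Tag) ->
        FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
        Query = {binary_bucketlist, Tag, {FoldBucketsFun, sets:new()}},
        {async, Folder} = leveled_bookie:book_returnfolder(Bookie, Query),
        sets:size(Folder()) == 0.
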
@@ -97,7 +97,7 @@ small_load_with2i(_Config) ->
     IdxQ1 = {index_query,
                 "Bucket",
                 {fun testutil:foldkeysfun/3, []},
-                {"idx1_bin", "#", "~"},
+                {"idx1_bin", "#", "|"},
                 {true, undefined}},
     {async, IdxFolder} = leveled_bookie:book_returnfolder(Bookie1, IdxQ1),
     KeyList1 = lists:usort(IdxFolder()),
@@ -106,7 +106,7 @@ small_load_with2i(_Config) ->
     IdxQ2 = {index_query,
                 {"Bucket", LastKey},
                 {fun testutil:foldkeysfun/3, []},
-                {"idx1_bin", LastTerm, "~"},
+                {"idx1_bin", LastTerm, "|"},
                 {false, undefined}},
     {async, IdxFolderLK} = leveled_bookie:book_returnfolder(Bookie1, IdxQ2),
     KeyList2 = lists:usort(IdxFolderLK()),
@@ -141,7 +141,7 @@ small_load_with2i(_Config) ->
                                                 ?RIAK_TAG,
                                                 "Bucket",
                                                 {"idx1_bin",
-                                                    "#", "~"},
+                                                    "#", "|"},
                                                 FoldObjectsFun}),
     KeyHashList3 = HTreeF3(),
     true = 9901 == length(KeyHashList1), % also includes the test object
@@ -186,7 +186,7 @@ query_count(_Config) ->
                                     testutil:sync_strategy()),
     BucketBin = list_to_binary("Bucket"),
     {TestObject, TestSpec} = testutil:generate_testobject(BucketBin,
-                                                            "Key1",
+                                                            term_to_binary("Key1"),
                                                             "Value1",
                                                             [],
                                                             [{"MDK1", "MDV1"}]),
@@ -269,7 +269,7 @@ query_count(_Config) ->
     Query1 = {index_query,
                 BucketBin,
                 {fun testutil:foldkeysfun/3, []},
-                {"idx2_bin", "2000", "2000~"},
+                {"idx2_bin", "2000", "2000|"},
                 {false, RegMia}},
     {async,
         Mia2KFolder1} = leveled_bookie:book_returnfolder(Book2, Query1),
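
The index-query hunks in this suite all make the same substitution: the end-of-range sentinel "~" becomes "|". Both characters sort above every alphanumeric character in ASCII (and so in Erlang string order), so the narrowed range still spans terms such as "2000". A quick check (illustration only):

    %% $z = 122, $| = 124, $~ = 126.
    true = ($z < $|) andalso ($| < $~).
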
@@ -396,8 +396,7 @@ query_count(_Config) ->
     io:format("Bucket set returned in ~w microseconds",
                 [timer:now_diff(os:timestamp(), SW_QA)]),

-    true = sets:size(BucketSet1) == 1,
-    true = sets:is_element(list_to_binary("Bucket"), BucketSet1),
+    true = sets:size(BucketSet1) == 1,

     ObjList10A = testutil:generate_objects(5000,
                                             binary_uuid,