Add tests for is_empty
Where keys are strings or integers, and where subkeys are involved
parent 65969c36d0
commit 41fb83abd1
3 changed files with 53 additions and 9 deletions
@@ -2166,6 +2166,37 @@ is_empty_test() ->
     ok = leveled_bookie:book_close(Bookie1).
 
+is_empty_headonly_test() ->
+    RootPath = reset_filestructure(),
+    {ok, Bookie1} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500},
+                                    {head_only, no_lookup}]),
+    ?assertMatch(true, book_isempty(Bookie1, ?HEAD_TAG)),
+    ObjSpecs =
+        [{add, <<"B1">>, <<"K1">>, 1, 100},
+            {remove, <<"B1">>, <<"K1">>, 0, null}],
+    ok = book_mput(Bookie1, ObjSpecs),
+    ?assertMatch(false, book_isempty(Bookie1, ?HEAD_TAG)),
+    ok = book_close(Bookie1).
+
+is_empty_stringkey_test() ->
+    RootPath = reset_filestructure(),
+    {ok, Bookie1} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500}]),
+    ?assertMatch(true, book_isempty(Bookie1, ?STD_TAG)),
+    Past = leveled_util:integer_now() - 300,
+    ?assertMatch(true, leveled_bookie:book_isempty(Bookie1, ?STD_TAG)),
+    ok = book_tempput(Bookie1,
+                        "B", "K", {value, <<"V">>}, [],
+                        ?STD_TAG, Past),
+    ok = book_put(Bookie1,
+                    "B", "K0", {value, <<"V">>}, [],
+                    ?STD_TAG),
+    ?assertMatch(false, book_isempty(Bookie1, ?STD_TAG)),
+    ok = book_close(Bookie1).
+
 scan_table_test() ->
     K1 = leveled_codec:to_ledgerkey(<<"B1">>,
                                     <<"K1">>,
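Note (not part of the diff): the head_only test drives book_mput/2 with a list of object specifications. From the test above, each spec appears to take the form {Action, Bucket, Key, SubKey, Value}, with Action being add or remove; a minimal sketch assuming that shape:

    %% Sketch only: object-spec shape inferred from is_empty_headonly_test/0 above.
    ObjSpecs = [{add, <<"B1">>, <<"K1">>, 1, 100},      %% assumed: add value 100 under subkey 1
                {remove, <<"B1">>, <<"K1">>, 0, null}], %% assumed: remove the entry under subkey 0
    ok = leveled_bookie:book_mput(Bookie, ObjSpecs).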
@@ -67,7 +67,8 @@
         riak_extract_metadata/2,
         segment_hash/1,
         to_lookup/1,
-        riak_metadata_to_binary/2]).
+        riak_metadata_to_binary/2,
+        next_key/1]).
 
 -define(V1_VERS, 1).
 -define(MAGIC, 53). % riak_kv -> riak_object
@@ -252,6 +253,8 @@ from_ledgerkey({?IDX_TAG, ?ALL_BUCKETS, {_IdxFld, IdxVal}, {Bucket, Key}}) ->
     {Bucket, Key, IdxVal};
 from_ledgerkey({?IDX_TAG, Bucket, {_IdxFld, IdxVal}, Key}) ->
     {Bucket, Key, IdxVal};
+from_ledgerkey({?HEAD_TAG, Bucket, Key, SubKey}) ->
+    {Bucket, {Key, SubKey}};
 from_ledgerkey({_Tag, Bucket, Key, _SubKey}) ->
     {Bucket, Key}.
 
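Note (not part of the diff): the new clause keeps the subkey of a ?HEAD_TAG ledger key rather than flattening it away, returning {Bucket, {Key, SubKey}}. A minimal sketch of the expected result, based only on the clause added above:

    %% Sketch only (assumes the include that defines ?HEAD_TAG is in scope):
    {<<"B1">>, {<<"K1">>, 1}} =
        leveled_codec:from_ledgerkey({?HEAD_TAG, <<"B1">>, <<"K1">>, 1}).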
@@ -835,6 +838,13 @@ get_metadata_from_siblings(<<ValLen:32/integer, Rest0/binary>>,
                                 [LastMod|LastMods]).
 
 
+next_key(Key) when is_binary(Key) ->
+    <<Key/binary, 0>>;
+next_key(Key) when is_integer(Key) ->
+    Key + 1;
+next_key(Key) when is_list(Key) ->
+    Key ++ [0].
+
 
 %%%============================================================================
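Note (not part of the diff): next_key/1 produces the immediate successor of a key so a fold can resume strictly after it, and now covers binary, integer, and list (string) keys, matching the commit message. Expected behaviour, a sketch based only on the clauses above:

    %% Sketch of next_key/1 results per the clauses added above:
    <<"K1", 0>> = leveled_codec:next_key(<<"K1">>),   %% binary key: append a zero byte
    2 = leveled_codec:next_key(1),                    %% integer key: increment
    [$K, $1, 0] = leveled_codec:next_key("K1").       %% list (string) key: append [0]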
@@ -437,23 +437,26 @@ get_nextbucket(NextBucket, NextKey, Tag, LedgerSnapshot, BKList, {C, L}) ->
         null ->
             leveled_log:log("B0008",[]),
             BKList;
-        {{B, K}, V} when is_binary(B), is_binary(K) ->
+        {{B, K}, V} ->
             case leveled_codec:is_active({Tag, B, K, null}, V, Now) of
                 true ->
                     leveled_log:log("B0009",[B]),
-                    get_nextbucket(<<B/binary, 0>>,
+                    get_nextbucket(leveled_codec:next_key(B),
                                     null,
                                     Tag,
                                     LedgerSnapshot,
                                     [{B, K}|BKList],
                                     {C + 1, L});
                 false ->
-                    get_nextbucket(B,
-                                    <<K/binary, 0>>,
-                                    Tag,
-                                    LedgerSnapshot,
-                                    BKList,
-                                    {C, L})
+                    NK =
+                        case Tag of
+                            ?HEAD_TAG ->
+                                {PK, SK} = K,
+                                {PK, leveled_codec:next_key(SK)};
+                            _ ->
+                                leveled_codec:next_key(K)
+                        end,
+                    get_nextbucket(B, NK, Tag, LedgerSnapshot, BKList, {C, L})
             end;
         {NB, _V} ->
             leveled_log:log("B0010",[NB]),
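Note (not part of the diff): the successor key is now computed with leveled_codec:next_key/1 instead of appending a zero byte to a binary, so string and integer keys also work, and for ?HEAD_TAG entries the fold key is a {PrimaryKey, SubKey} tuple in which only the subkey is advanced. A standalone sketch of that branching (next_fold_key/2 is a hypothetical helper name, not in the commit):

    %% Hypothetical helper illustrating the NK computation added above:
    next_fold_key(?HEAD_TAG, {PK, SK}) -> {PK, leveled_codec:next_key(SK)};
    next_fold_key(_Tag, K) -> leveled_codec:next_key(K).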