Use backwards compatible term_to_binary (#408)
* Use backwards compatible term_to_binary

So that where we have hashed term_to_binary output in OTP 25 or earlier, that hash will be matched in OTP 26.

* Test reliability

If all keys are put in order, the max_sstslots parameter may not be used, as the driver at L0 is the penciller cache size, and merge to new files (managed by the parameter) only occurs when there are overlapping files at the level below.
parent c4a32366df
commit b96518c32a
7 changed files with 32 additions and 16 deletions
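
The substitution is mechanical, but the motivation deserves a note. OTP 26 changed the default external term format from minor_version 1 to 2, and version 2 encodes atoms as UTF-8 (SMALL_ATOM_UTF8_EXT rather than ATOM_EXT), so the bytes produced by a bare term_to_binary/1 for any atom-bearing key can differ between releases, and so can any hash taken of those bytes. A minimal shell sketch of the property the commit relies on (illustrative values, not part of the commit):

    %% Leveled keys are tuples containing atoms, e.g. the object tag 'o'
    Key = {o, "B1", "K1", null},
    %% Pinning minor_version 1 reproduces the OTP 25 default encoding,
    %% so a hash stored before the upgrade still matches afterwards:
    Pinned  = term_to_binary(Key, [{minor_version, 1}]),
    %% The unpinned default on OTP 26 may encode the atoms differently,
    %% so its hash may not match one stored under OTP 25:
    Default = term_to_binary(Key),
    StoredHash = erlang:md5(Pinned).   % equal on OTP 25 and OTP 26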
@@ -703,6 +703,7 @@ book_returnfolder(Pid, RunnerType) ->
         Key :: term(),
         StartKey::term(),
         FoldFun::fun((Bucket, Key | {IndexVal, Key}, Acc) -> Acc),
+        Key::term(),
         Acc::term(),
         IndexField::term(),
         IndexVal::term(),

@@ -202,7 +202,7 @@ headkey_to_canonicalbinary({?HEAD_TAG, {BucketType, Bucket}, Key, SubKey})
 headkey_to_canonicalbinary(Key) when element(1, Key) == ?HEAD_TAG ->
     % In unit tests head specs can have non-binary keys, so handle
     % this through hashing the whole key
-    term_to_binary(Key).
+    leveled_util:t2b(Key).
 
 
 -spec to_lookup(ledger_key()) -> maybe_lookup().

@@ -132,7 +132,7 @@ key_to_canonicalbinary(Key) ->
     OverrideFun(Key).
 
 default_key_to_canonicalbinary(Key) ->
-    term_to_binary(Key).
+    leveled_util:t2b(Key).
 
 
 -spec build_head(object_tag()|headonly_tag(), object_metadata()) -> head().

@@ -271,7 +271,7 @@ tictactree(SnapFun, {Tag, Bucket, Query}, JournalCheck, TreeSize, Filter) ->
                 true ->
                     {K, T};
                 false ->
-                    {term_to_binary(K), T}
+                    {leveled_util:t2b(K), T}
                 end
         end,
     {StartKey, EndKey, ExtractFun} =

@@ -359,7 +359,7 @@ keyto_segment32({segment_hash, SegmentID, ExtraHash, _AltHash})
 keyto_segment32(BinKey) when is_binary(BinKey) ->
     keyto_segment32(keyto_segment48(BinKey));
 keyto_segment32(Key) ->
-    keyto_segment32(term_to_binary(Key)).
+    keyto_segment32(leveled_util:t2b(Key)).
 
 -spec keyto_segment48(binary()) -> segment48().
 %% @doc

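Segment ids computed by keyto_segment32/1 are persisted inside tictac trees, so the non-binary clause must keep producing the same bytes across an upgrade. A hedged usage sketch (assuming the exported leveled_tictac API shown in this hunk):

    %% The same non-binary key must map to the same 32-bit segment id
    %% before and after the OTP 26 upgrade; t2b/1 keeps the input stable:
    Seg = leveled_tictac:keyto_segment32({o, <<"B1">>, <<"K1">>, null}),
    true = is_integer(Seg).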
@@ -607,7 +607,7 @@ simple_bysize_test_allsizes() ->
 
 simple_test_withsize(Size) ->
     ?assertMatch(true, valid_size(Size)),
-    BinFun = fun(K, V) -> {term_to_binary(K), term_to_binary(V)} end,
+    BinFun = fun(K, V) -> {leveled_util:t2b(K), leveled_util:t2b(V)} end,
 
     K1 = {o, "B1", "K1", null},
     K2 = {o, "B1", "K2", null},

@@ -650,7 +650,7 @@ simple_test_withsize(Size) ->
 
     GetSegFun =
         fun(TK) ->
-            get_segment(keyto_segment32(term_to_binary(TK)), SC)
+            get_segment(keyto_segment32(leveled_util:t2b(TK)), SC)
         end,
 
     DL0 = find_dirtyleaves(Tree1, Tree0),

@@ -681,7 +681,7 @@ merge_bysize_xlarge_test2() ->
     merge_test_withsize(xlarge).
 
 merge_test_withsize(Size) ->
-    BinFun = fun(K, V) -> {term_to_binary(K), term_to_binary(V)} end,
+    BinFun = fun(K, V) -> {leveled_util:t2b(K), leveled_util:t2b(V)} end,
 
     TreeX0 = new_tree(0, Size),
     TreeX1 = add_kv(TreeX0, {o, "B1", "X1", null}, {caine, 1}, BinFun),

@@ -719,7 +719,7 @@ merge_emptytree_test() ->
     ?assertMatch([], find_dirtyleaves(TreeA, TreeC)).
 
 alter_segment_test() ->
-    BinFun = fun(K, V) -> {term_to_binary(K), term_to_binary(V)} end,
+    BinFun = fun(K, V) -> {leveled_util:t2b(K), leveled_util:t2b(V)} end,
 
     TreeX0 = new_tree(0, small),
     TreeX1 = add_kv(TreeX0, {o, "B1", "X1", null}, {caine, 1}, BinFun),

@@ -738,7 +738,7 @@ alter_segment_test() ->
     ?assertMatch([], CompareResult).
 
 return_segment_test() ->
-    BinFun = fun(K, V) -> {term_to_binary(K), term_to_binary(V)} end,
+    BinFun = fun(K, V) -> {leveled_util:t2b(K), leveled_util:t2b(V)} end,
 
     TreeX0 = new_tree(0, small),
     {TreeX1, SegID}

@@ -11,6 +11,7 @@
         integer_now/0,
         integer_time/1,
         magic_hash/1,
+        t2b/1,
         safe_rename/4]).
 
 -define(WRITE_OPS, [binary, raw, read, write]).

@@ -53,7 +54,7 @@ magic_hash({binary, BinaryKey}) ->
     H = 5381,
     hash1(H, BinaryKey) band 16#FFFFFFFF;
 magic_hash(AnyKey) ->
-    BK = term_to_binary(AnyKey),
+    BK = t2b(AnyKey),
     magic_hash({binary, BK}).
 
 hash1(H, <<>>) ->

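magic_hash/1 appears to be the hash leveled uses for in-memory ledger key lookups, so non-binary keys there get the same treatment. A small sketch of the preserved property (assumed shell usage):

    %% The DJB2-style hash of a non-binary key now goes through t2b/1,
    %% so it is stable across the OTP 25 -> OTP 26 boundary:
    H = leveled_util:magic_hash({o, "B1", "K1", null}),
    true = is_integer(H) andalso (H =< 16#FFFFFFFF).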
@@ -64,6 +65,17 @@ hash1(H, <<B:8/integer, Rest/bytes>>) ->
     hash1(H2, Rest).
 
 
+-spec t2b(term()) -> binary().
+%% @doc
+%% term_to_binary with options necessary to ensure backwards compatibility
+%% in the handling of atoms (within OTP 26).
+%% See https://github.com/martinsumner/leveled/issues/407
+%% If the binary() which is output is to be hashed for comparison, then
+%% this must be used.
+t2b(Term) ->
+    term_to_binary(Term, [{minor_version, 1}]).
+
+
 -spec safe_rename(string(), string(), binary(), boolean()) -> ok.
 %% @doc
 %% Write a file, sync it and rename it (and for super-safe mode read it back)

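The design choice worth noting: rather than migrating stored hashes, the commit pins the encoding that produced them. Because {minor_version, 1} restores the pre-OTP-26 default, no persisted segment or canonical-binary hash needs rewriting. The intended call pattern, as a sketch:

    %% Always derive hash-input binaries via the helper, never via a
    %% bare term_to_binary/1:
    BinKey = leveled_util:t2b({o, <<"Bucket">>, <<"Key">>, null}).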
@@ -74,9 +74,10 @@ simple_test_withlog(LogLevel, ForcedLogs) ->
     ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", "Value2",
                                     [{add, "Index1", "Term1"}]),
     {ok, "Value2"} = leveled_bookie:book_get(Bookie2, "Bucket1", "Key2"),
-    {ok, {62888926, 60, undefined}} = leveled_bookie:book_head(Bookie2,
-                                                                "Bucket1",
-                                                                "Key2"),
+    {ok, {62888926, S, undefined}} =
+        leveled_bookie:book_head(Bookie2, "Bucket1", "Key2"),
+    true = (S == 58) or (S == 60),
+        % After OTP 26 the object is 58 bytes not 60
     testutil:check_formissingobject(Bookie2, "Bucket1", "Key2"),
     ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", <<"Value2">>,
                                     [{remove, "Index1", "Term1"},

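The two-byte shift is consistent with the atom encoding change described above: SMALL_ATOM_UTF8_EXT is one byte shorter than ATOM_EXT per atom (for example, term_to_binary(ok) is 6 bytes under minor_version 1 and 5 bytes under OTP 26's default), and the stored head for this object presumably carries two atoms. Hence the test now accepts either size rather than asserting a release-specific value.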
@@ -199,16 +200,18 @@ bigsst_littlesst(_Config) ->
         {compression_point, on_compact}],
     {ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
     ObjL1 =
-        testutil:generate_objects(80000, 1, [],
+        lists:keysort(
+            1,
+            testutil:generate_objects(80000, 1, [],
                                     leveled_rand:rand_bytes(100),
-                                    fun() -> [] end, <<"B">>),
+                                    fun() -> [] end, <<"B">>)
+        ),
     testutil:riakload(Bookie1, ObjL1),
     testutil:check_forlist(Bookie1, ObjL1),
     JFP = RootPath ++ "/ledger/ledger_files/",
     {ok, FNS1} = file:list_dir(JFP),
     ok = leveled_bookie:book_destroy(Bookie1),
 
 
     StartOpts2 = lists:ukeysort(1, [{max_sstslots, 24}|StartOpts1]),
     {ok, Bookie2} = leveled_bookie:book_start(StartOpts2),
     testutil:riakload(Bookie2, ObjL1),
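
The lists:keysort(1, ...) wrapper implements the "Test reliability" point from the commit message: assuming testutil:generate_objects/6 tags each generated object with a random integer in the first tuple element, sorting on that element shuffles the objects out of strict key order, so files at the level below overlap and the merge governed by max_sstslots is genuinely exercised by the test.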