Check segment is as expected with tuplebuckets
In head_only mode
This commit is contained in:
parent
e72a946f43
commit
e9fb893ea0
4 changed files with 118 additions and 19 deletions
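This commit makes AAE segment hashing work for head_only objects stored under tuple buckets ({BucketType, Bucket}): the tuple is flattened into a single binary before hashing, so segment lookups behave the same as for plain binary buckets. A minimal sketch of the idea, assuming the leveled_tictac:keyto_segment32/1 call shown in the diff (the wrapper function itself is illustrative, not part of the commit):

%% Illustrative helper: tuple buckets flatten to one binary before
%% the segment is computed, mirroring the changes in this commit.
segment_for({Type, Bucket}, Key) when is_binary(Type), is_binary(Bucket) ->
    leveled_tictac:keyto_segment32(<<Type/binary, Bucket/binary, Key/binary>>);
segment_for(Bucket, Key) when is_binary(Bucket), is_binary(Key) ->
    leveled_tictac:keyto_segment32(<<Bucket/binary, Key/binary>>).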
@@ -187,12 +187,12 @@ segment_hash({?HEAD_TAG, Bucket, Key, SubK})
 segment_hash({?HEAD_TAG, Bucket, Key, _SubK})
         when is_binary(Bucket), is_binary(Key) ->
     segment_hash(<<Bucket/binary, Key/binary>>);
-% segment_hash({?HEAD_TAG, {BucketType, Bucket}, Key, SubKey})
-%         when is_binary(BucketType), is_binary(Bucket) ->
-%     segment_hash({?HEAD_TAG,
-%                     <<BucketType/binary, Bucket/binary>>,
-%                     Key,
-%                     SubKey});
+segment_hash({?HEAD_TAG, {BucketType, Bucket}, Key, SubKey})
+        when is_binary(BucketType), is_binary(Bucket) ->
+    segment_hash({?HEAD_TAG,
+                    <<BucketType/binary, Bucket/binary>>,
+                    Key,
+                    SubKey});
 segment_hash(Key) ->
     segment_hash(term_to_binary(Key)).
 
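The clause above was previously commented out; enabling it means a ?HEAD_TAG key with a tuple bucket recurses on the flattened bucket, and so hashes to the same segment as the equivalent single-binary bucket. A sketch, assuming ?HEAD_TAG is the module's head-tag macro:

%% Both expressions now return the same segment hash, because the
%% tuple clause recurses on the flattened <<Type/binary, Bucket/binary>>.
Hash = segment_hash({?HEAD_TAG, <<"TypeBucket">>, <<"Key">>, null}),
Hash = segment_hash({?HEAD_TAG, {<<"Type">>, <<"Bucket">>}, <<"Key">>, null}).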
@@ -199,7 +199,7 @@ basic_riak_tester(Bucket, KeyCount) ->
     {_I2L, Obj2L, _Spc2L} = lists:last(ObjList2),
 
     SegList =
-        lists:map(fun(Obj) -> get_aae_segment(Obj) end,
+        lists:map(fun(Obj) -> testutil:get_aae_segment(Obj) end,
                     [Obj1, Obj2, Obj3, Obj4, Obj5, Obj2L]),
     BKList =
         lists:map(fun(Obj) ->
@@ -223,15 +223,6 @@ basic_riak_tester(Bucket, KeyCount) ->
     ok = leveled_bookie:book_destroy(Bookie2).
 
 
-get_aae_segment(Obj) ->
-    get_aae_segment(testutil:get_bucket(Obj), testutil:get_key(Obj)).
-
-get_aae_segment({Type, Bucket}, Key) ->
-    leveled_tictac:keyto_segment32(<<Type/binary, Bucket/binary, Key/binary>>);
-get_aae_segment(Bucket, Key) ->
-    leveled_tictac:keyto_segment32(<<Bucket/binary, Key/binary>>).
-
-
 fetchclocks_modifiedbetween(_Config) ->
     RootPathA = testutil:reset_filestructure("fetchClockA"),
     RootPathB = testutil:reset_filestructure("fetchClockB"),
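These helper clauses are not lost: they are re-added to testutil in the hunks below and exported as get_aae_segment/1 and get_aae_segment/2, so both riak_SUITE and tictac_SUITE can share them. Callers switch to the module-qualified form, as in the earlier hunk:

%% Compute AAE segments via the shared testutil helper.
SegList = lists:map(fun testutil:get_aae_segment/1,
                    [Obj1, Obj2, Obj3, Obj4, Obj5, Obj2L]).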
@@ -33,6 +33,8 @@
             get_compressiblevalue/0,
             get_compressiblevalue_andinteger/0,
             get_randomindexes_generator/1,
+            get_aae_segment/1,
+            get_aae_segment/2,
             name_list/0,
             load_objects/5,
             load_objects/6,
@@ -799,4 +801,14 @@ find_journals(RootPath) ->
     CDBFiles.
 
 convert_to_seconds({MegaSec, Seconds, _MicroSec}) ->
     MegaSec * 1000000 + Seconds.
+
+
+get_aae_segment(Obj) ->
+    get_aae_segment(testutil:get_bucket(Obj), testutil:get_key(Obj)).
+
+get_aae_segment({Type, Bucket}, Key) ->
+    leveled_tictac:keyto_segment32(<<Type/binary, Bucket/binary, Key/binary>>);
+get_aae_segment(Bucket, Key) ->
+    leveled_tictac:keyto_segment32(<<Bucket/binary, Key/binary>>).
+
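With the helper moved here and exported, segments can be computed for plain and tuple buckets alike. A quick illustration of the equivalence the two clauses encode (values are illustrative):

%% The tuple-bucket clause flattens {Type, Bucket} before hashing,
%% so these two calls return the same 32-bit segment.
Seg = testutil:get_aae_segment({<<"Type">>, <<"B1">>}, <<"K1">>),
Seg = testutil:get_aae_segment(<<"TypeB1">>, <<"K1">>).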
@@ -5,13 +5,15 @@
 -export([
             many_put_compare/1,
             index_compare/1,
-            basic_headonly/1
+            basic_headonly/1,
+            tuplebuckets_headonly/1
             ]).
 
 all() -> [
             many_put_compare,
             index_compare,
-            basic_headonly
+            basic_headonly,
+            tuplebuckets_headonly
         ].
 
 -define(LMD_FORMAT, "~4..0w~2..0w~2..0w~2..0w~2..0w").
@@ -540,6 +542,100 @@ index_compare(_Config) ->
     ok = leveled_bookie:book_close(Book2D).
 
 
+tuplebuckets_headonly(_Config) ->
+    ObjectCount = 60000,
+
+    RootPathHO = testutil:reset_filestructure("testTBHO"),
+    StartOpts1 = [{root_path, RootPathHO},
+                    {max_pencillercachesize, 16000},
+                    {sync_strategy, none},
+                    {head_only, with_lookup},
+                    {max_journalsize, 500000}],
+    {ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
+
+    ObjectSpecFun =
+        fun(Op) ->
+            fun(N) ->
+                Bucket = {<<"BucketType">>, <<"B", 0:4/integer, N:4/integer>>},
+                Key = <<"K", N:32/integer>>,
+                <<Hash:32/integer, _RestBN/bitstring>> =
+                    crypto:hash(md5, <<N:32/integer>>),
+                {Op, Bucket, Key, null, Hash}
+            end
+        end,
+
+    ObjectSpecL = lists:map(ObjectSpecFun(add), lists:seq(1, ObjectCount)),
+
+    SW0 = os:timestamp(),
+    ok = load_objectspecs(ObjectSpecL, 32, Bookie1),
+    io:format("Loaded an object count of ~w in ~w ms~n",
+                [ObjectCount, timer:now_diff(os:timestamp(), SW0)/1000]),
+
+    CheckHeadFun =
+        fun({add, B, K, null, H}) ->
+            {ok, H} =
+                leveled_bookie:book_headonly(Bookie1, B, K, null)
+        end,
+    lists:foreach(CheckHeadFun, ObjectSpecL),
+
+    BucketList =
+        lists:map(fun(I) ->
+                        {<<"BucketType">>, <<"B", 0:4/integer, I:4/integer>>}
+                    end,
+                    lists:seq(0, 15)),
+
+    FoldHeadFun =
+        fun(B, {K, null}, V, Acc) ->
+            [{add, B, K, null, V}|Acc]
+        end,
+    SW1 = os:timestamp(),
+
+    {async, HeadRunner1} =
+        leveled_bookie:book_headfold(Bookie1,
+                                        ?HEAD_TAG,
+                                        {bucket_list, BucketList},
+                                        {FoldHeadFun, []},
+                                        false, false,
+                                        false),
+    ReturnedObjSpecL1 = lists:reverse(HeadRunner1()),
+    [FirstItem|_Rest] = ReturnedObjSpecL1,
+    LastItem = lists:last(ReturnedObjSpecL1),
+
+    io:format("Returned ~w objects with first ~w and last ~w in ~w ms~n",
+                [length(ReturnedObjSpecL1),
+                    FirstItem, LastItem,
+                    timer:now_diff(os:timestamp(), SW1)/1000]),
+
+    true = ReturnedObjSpecL1 == lists:sort(ObjectSpecL),
+
+    {add, {TB, B1}, K1, null, _H1} = FirstItem,
+    {add, {TB, BL}, KL, null, _HL} = LastItem,
+    SegList = [testutil:get_aae_segment({TB, B1}, K1),
+                testutil:get_aae_segment({TB, BL}, KL)],
+
+    SW2 = os:timestamp(),
+    {async, HeadRunner2} =
+        leveled_bookie:book_headfold(Bookie1,
+                                        ?HEAD_TAG,
+                                        {bucket_list, BucketList},
+                                        {FoldHeadFun, []},
+                                        false, false,
+                                        SegList),
+    ReturnedObjSpecL2 = lists:reverse(HeadRunner2()),
+
+    io:format("Returned ~w objects using seglist in ~w ms~n",
+                [length(ReturnedObjSpecL2),
+                    timer:now_diff(os:timestamp(), SW2)/1000]),
+
+    true = length(ReturnedObjSpecL2) < (ObjectCount/1000 + 2),
+    % Not too many false positives
+    true = lists:member(FirstItem, ReturnedObjSpecL2),
+    true = lists:member(LastItem, ReturnedObjSpecL2),
+
+    leveled_bookie:book_destroy(Bookie1).
+
+
+
 basic_headonly(_Config) ->
     ObjectCount = 200000,
     RemoveCount = 100,
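The final assertions in the new test bound segment-filter false positives: with ObjectCount = 60000, the fold restricted to the two-entry SegList must return fewer than 60000/1000 + 2 = 62 entries, and both genuine boundary items must still be present. In other words, the seglist cuts 60,000 objects down to the 2 real matches plus at most roughly 0.1% noise:

%% Worked bound from the assertion above (illustrative arithmetic only).
MaxReturned = 60000 / 1000 + 2.   % 62.0, so the fold must return < 62 entries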