%% -------- PENCILLER MEMORY ---------
%%
%% Module that provides functions for maintaining the L0 memory of the
%% Penciller.
%%
%% It is desirable that the L0Mem can efficiently handle the push of new trees
%% whilst maintaining the capability to quickly snapshot the memory for clones
%% of the Penciller.
%%
%% ETS tables are not used due to complications with managing their mutability,
%% as the database is snapshotted.
%%
%% An attempt was made to merge all trees into a single tree on push (in a
%% spawned process), but this proved to have an expensive impact as the tree
%% got larger.
%%
%% This approach is to keep a list of trees which have been received in the
%% order which they were received.  There is then a fixed-size array of hashes
%% used to either point lookups at the right tree in the list, or inform the
%% requestor that the key is not present, avoiding any lookups.
%%
%% The trade-off taken with the approach is that the size of the L0Cache is
%% uncertain.  The Size count is incremented based on the inbound size and so
%% does not necessarily reflect the size once the lists are merged (reflecting
%% rotating objects).

-module(leveled_pmem).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
|
|
|
-include("include/leveled.hrl").
|
|
|
|
|
2016-10-29 13:27:21 +01:00
|
|
|
-export([
|
2017-01-05 21:58:33 +00:00
|
|
|
prepare_for_index/2,
|
2016-12-11 01:02:56 +00:00
|
|
|
add_to_cache/4,
|
2016-10-31 01:33:33 +00:00
|
|
|
to_list/2,
|
2016-10-29 13:27:21 +01:00
|
|
|
check_levelzero/3,
|
2017-01-05 21:58:33 +00:00
|
|
|
check_levelzero/4,
|
2016-12-11 05:23:24 +00:00
|
|
|
merge_trees/4,
|
2017-01-05 21:58:33 +00:00
|
|
|
add_to_index/3,
|
2016-12-11 05:23:24 +00:00
|
|
|
new_index/0,
|
2016-12-11 08:16:00 +00:00
|
|
|
clear_index/1,
|
2016-12-11 05:23:24 +00:00
|
|
|
check_index/2
|
2016-10-29 13:27:21 +01:00
|
|
|
]).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
|
|
|
-include_lib("eunit/include/eunit.hrl").
|
|
|
|
|
2017-06-01 21:37:23 +01:00
|
|
|
% -type index_array() :: array:array().
|
2018-06-23 15:15:49 +01:00
|
|
|
-type index_array() :: any()|none. % To live with OTP16
|
2016-10-29 01:06:00 +01:00
|
|
|
|
2018-06-04 10:57:37 +01:00
|
|
|
-export_type([index_array/0]).
|
|
|
|
|
2016-10-29 01:06:00 +01:00
|
|
|
%%%============================================================================
|
|
|
|
%%% API
|
|
|
|
%%%============================================================================
|
|
|
|
|
2018-05-04 15:24:08 +01:00
|
|
|
-spec prepare_for_index(index_array(), leveled_codec:segment_hash())
                                                            -> index_array().
%% @doc
%% Add the hash of a key to the index.  This is 'prepared' in the sense that
%% this index is not used until it is loaded into the main index.
%%
%% prepare_for_index is called from the Bookie when a key is being added to
%% the ledger cache, but the index is not used until that ledger cache is in
%% the penciller L0 memory
prepare_for_index(IndexArray, no_lookup) ->
    % Keys flagged as no_lookup cannot be fetched individually, so they
    % contribute nothing to the lookup index
    IndexArray;
prepare_for_index(IndexArray, Hash) ->
    {Slot, H0} = split_hash(Hash),
    % Append a 3-byte entry (leading bit 1, then the 23-bit hash) to the
    % binary held against this slot
    SlotBin = array:get(Slot, IndexArray),
    array:set(Slot, <<SlotBin/binary, 1:1/integer, H0:23/integer>>, IndexArray).
|
|
|
|
|
2017-05-30 15:36:50 +01:00
|
|
|
-spec add_to_index(index_array(), index_array(), integer()) -> index_array().
%% @doc
%% Expand the penciller's current index array with the details from a new
%% ledger cache tree sent from the Bookie.  The tree will have a cache slot
%% which is the index of this ledger_cache in the list of the ledger_caches
add_to_index(LM1Array, L0Index, CacheSlot) when CacheSlot < 128 ->
    MergeSlotFun =
        fun(Slot, UpdIndex) ->
            Existing = array:get(Slot, UpdIndex),
            NewEntries = array:get(Slot, LM1Array),
            % Each slot binary is extended with a 1-byte cache-slot marker
            % (leading bit 0, 7-bit slot number) followed by the 3-byte hash
            % entries (leading bit 1) prepared for this ledger cache
            Extended =
                <<Existing/binary,
                    0:1/integer, CacheSlot:7/integer,
                    NewEntries/binary>>,
            array:set(Slot, Extended, UpdIndex)
        end,
    lists:foldl(MergeSlotFun, L0Index, lists:seq(0, 255)).
|
2016-12-11 05:23:24 +00:00
|
|
|
|
2017-05-30 15:36:50 +01:00
|
|
|
-spec new_index() -> index_array().
%% @doc
%% Create a new index array, with one (initially empty) binary per slot
new_index() ->
    array:new([{size, 256}, {default, <<>>}]).

-spec clear_index(index_array()) -> index_array().
%% @doc
%% Discard the contents of an index array by replacing it with a fresh,
%% empty index
clear_index(_L0Index) ->
    new_index().
|
2016-12-11 07:07:30 +00:00
|
|
|
|
2017-10-20 23:04:29 +01:00
|
|
|
-spec check_index({integer(), integer()}, index_array()) -> list(integer()).
%% @doc
%% return a list of positions in the list of cache arrays that may contain the
%% key associated with the hash being checked
check_index(Hash, L0Index) ->
    {Slot, H0} = split_hash(Hash),
    SlotBin = array:get(Slot, L0Index),
    find_pos(SlotBin, H0, [], 0).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
2017-05-30 15:36:50 +01:00
|
|
|
|
|
|
|
-spec add_to_cache(integer(),
                    {tuple(), integer(), integer()},
                    integer(),
                    list()) ->
                        {integer(), integer(), list()}.
%% @doc
%% The penciller's cache is a list of leveled_trees, this adds a new tree to
%% that cache, providing an update to the approximate size of the cache and
%% the Ledger's SQN.
add_to_cache(L0Size, {LevelMinus1, MinSQN, MaxSQN}, LedgerSQN, TreeList) ->
    case leveled_tree:tsize(LevelMinus1) of
        0 ->
            % An empty ledger cache - nothing to add, so the cache state is
            % unchanged
            {LedgerSQN, L0Size, TreeList};
        LM1Size ->
            % The new cache must not contain sequence numbers below those
            % already acknowledged by the Ledger.  The single-branch if is a
            % deliberate assertion - crash if this invariant is broken
            if
                MinSQN >= LedgerSQN ->
                    % Caches are appended so the list remains in order of
                    % receipt
                    {MaxSQN,
                        L0Size + LM1Size,
                        TreeList ++ [LevelMinus1]}
            end
    end.
|
|
|
|
|
|
|
|
-spec to_list(integer(), fun()) -> list().
%% @doc
%% The cache is a list of leveled_trees of length Slots.  This will fetch
%% each tree in turn by slot ID and then produce a merged/sorted output of
%% Keys and Values (to load into a SST file).
%%
%% Each slot is requested in turn to avoid halting the penciller whilst it
%% does a large object copy of the whole cache.
to_list(Slots, FetchFun) ->
    SW = os:timestamp(),
    MergeSlotFun =
        fun(Slot, Acc) ->
            SlotKVL = leveled_tree:to_list(FetchFun(Slot)),
            lists:ukeymerge(1, Acc, SlotKVL)
        end,
    % Fold from the highest slot (most recently received) downwards, so on
    % a key clash ukeymerge keeps the entry already in the accumulator -
    % i.e. the more recent one
    SlotList = lists:reverse(lists:seq(1, Slots)),
    FullList = lists:foldl(MergeSlotFun, [], SlotList),
    leveled_log:log_timer("PM002", [length(FullList)], SW),
    FullList.
|
2016-10-29 13:27:21 +01:00
|
|
|
|
2017-05-30 15:36:50 +01:00
|
|
|
-spec check_levelzero(tuple(), list(integer()), list())
                                            -> {boolean(), tuple()|not_found}.
%% @doc
%% Check for the presence of a given Key in the Level Zero cache, with the
%% index array having been checked first for a list of potential positions
%% in the list of ledger caches - and then each potential ledger_cache being
%% checked (with the most recently received cache being checked first) until a
%% match is found.
%%
%% Note the success element of the return is a {Key, Value} tuple; the spec
%% previously read `tuple|not_found' (the atom type 'tuple'), which was a
%% typo for tuple().
check_levelzero(Key, PosList, TreeList) ->
    check_levelzero(Key, leveled_codec:segment_hash(Key), PosList, TreeList).
|
2016-10-29 13:27:21 +01:00
|
|
|
|
2017-10-20 23:04:29 +01:00
|
|
|
-spec check_levelzero(tuple(), {integer(), integer()}, list(integer()), list())
                                            -> {boolean(), tuple()|not_found}.
%% @doc
%% Check for the presence of a given Key in the Level Zero cache, with the
%% index array having been checked first for a list of potential positions
%% in the list of ledger caches - and then each potential ledger_cache being
%% checked (with the most recently received cache being checked first) until a
%% match is found.
%%
%% Note the success element of the return is a {Key, Value} tuple; the spec
%% previously read `tuple|not_found' (the atom type 'tuple'), which was a
%% typo for tuple().
check_levelzero(_Key, _Hash, _PosList, []) ->
    % No ledger caches are present in L0 memory - the key cannot be found
    {false, not_found};
check_levelzero(_Key, _Hash, [], _TreeList) ->
    % The index produced no candidate positions - the key cannot be found
    {false, not_found};
check_levelzero(Key, Hash, PosList, TreeList) ->
    check_slotlist(Key, Hash, PosList, TreeList).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
2017-05-30 15:36:50 +01:00
|
|
|
-spec merge_trees(tuple(), tuple(), list(tuple()), tuple()) -> list().
%% @doc
%% Return a list of keys and values across the level zero cache (and the
%% currently unmerged bookie's ledger cache) that are between StartKey
%% and EndKey (inclusive).
merge_trees(StartKey, EndKey, TreeList, LevelMinus1) ->
    MergeRangeFun =
        fun(Tree, Acc) ->
            TreeRange = leveled_tree:match_range(StartKey, EndKey, Tree),
            lists:ukeymerge(1, Acc, TreeRange)
        end,
    % The unmerged ledger cache is the most recent source so it is merged
    % first, and the cache list is reversed, so that on key clashes the
    % newer entry (already in the accumulator) is retained by ukeymerge
    lists:foldl(MergeRangeFun, [], [LevelMinus1|lists:reverse(TreeList)]).
|
2016-11-22 23:21:47 +00:00
|
|
|
|
2016-10-29 01:06:00 +01:00
|
|
|
%%%============================================================================
|
2016-10-29 13:27:21 +01:00
|
|
|
%%% Internal Functions
|
2016-10-29 01:06:00 +01:00
|
|
|
%%%============================================================================
|
|
|
|
|
2017-01-05 21:58:33 +00:00
|
|
|
|
|
|
|
%% Scan a slot's index binary for entries matching Hash, accumulating a
%% deduplicated list of cache positions which may hold the key.  The binary
%% is a sequence of 1-byte position markers (leading bit 0, 7-bit slot) and
%% 3-byte hash entries (leading bit 1, 23-bit hash); each hash entry belongs
%% to the most recently seen position marker.
find_pos(<<>>, _Hash, Acc, _SlotID) ->
    Acc;
find_pos(<<1:1/integer, Hash:23/integer, Rest/binary>>, Hash, Acc, SlotID) ->
    % A matching hash entry for cache slot SlotID - record the slot, but
    % only once
    case lists:member(SlotID, Acc) of
        true ->
            find_pos(Rest, Hash, Acc, SlotID);
        false ->
            find_pos(Rest, Hash, Acc ++ [SlotID], SlotID)
    end;
find_pos(<<1:1/integer, _Miss:23/integer, Rest/binary>>, Hash, Acc, SlotID) ->
    % A hash entry that does not match - skip over it
    find_pos(Rest, Hash, Acc, SlotID);
find_pos(<<0:1/integer, NxtSlot:7/integer, Rest/binary>>, Hash, Acc, _SlotID) ->
    % A position marker - subsequent hash entries belong to slot NxtSlot
    find_pos(Rest, Hash, Acc, NxtSlot).
|
|
|
|
|
|
|
|
|
2017-10-20 23:04:29 +01:00
|
|
|
%% Split a segment hash into a slot for the index array (the low byte of the
%% segment ID) and the 23-bit hash value stored against that slot.
split_hash({SegmentID, ExtraHash}) ->
    Slot = SegmentID band 16#FF,
    % Combine the remaining segment bits with the extra hash, truncated to
    % 23 bits to fit the 3-byte (1 flag bit + 23 hash bits) index entry
    Hash23 = ((SegmentID bsr 8) bor (ExtraHash bsl 8)) band 16#7FFFFF,
    {Slot, Hash23}.
|
2017-01-05 21:58:33 +00:00
|
|
|
|
2017-01-20 16:36:20 +00:00
|
|
|
%% Check each of the candidate cache positions in CheckList for Key,
%% examining the most recently received cache first and stopping at the
%% first match.
check_slotlist(Key, _Hash, CheckList, TreeList) ->
    CheckSlotFun =
        fun(_Slot, {true, KV}) ->
                % A match has already been found - skip remaining slots
                {true, KV};
            (Slot, {false, _NF} = Miss) ->
                case leveled_tree:match(Key, lists:nth(Slot, TreeList)) of
                    none ->
                        Miss;
                    {value, Value} ->
                        {true, {Key, Value}}
                end
        end,
    % CheckList is in order of receipt - reverse it so the newest ledger
    % cache is checked first
    lists:foldl(CheckSlotFun, {false, not_found}, lists:reverse(CheckList)).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
|
|
|
%%%============================================================================
|
|
|
|
%%% Test
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
-ifdef(TEST).
|
|
|
|
|
2017-01-05 21:58:33 +00:00
|
|
|
%% Produce a sorted list (deduplicated by key) of random KV pairs for tests
generate_randomkeys_aslist(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
    KVL = generate_randomkeys(Seqn,
                                Count,
                                [],
                                BucketRangeLow,
                                BucketRangeHigh),
    lists:ukeysort(1, KVL).
|
|
|
|
|
2016-10-29 01:06:00 +01:00
|
|
|
%% As generate_randomkeys_aslist/4, but with the result loaded into a
%% leveled_tree of the configured cache type
generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
    KVL = generate_randomkeys(Seqn,
                                Count,
                                [],
                                BucketRangeLow,
                                BucketRangeHigh),
    leveled_tree:from_orderedlist(lists:ukeysort(1, KVL), ?CACHE_TYPE).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
|
|
|
%% Accumulate Count random object keys (with random bucket numbers drawn from
%% BucketLow..BucketLow+BRange) and simple metadata values, with ascending
%% sequence numbers starting at Seqn.
generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
    Acc;
generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
    % Bucket numbers are formatted as digits only (e.g. "Bucket0123") so the
    % generated buckets can fall within the "Bucket0100"/"Bucket0200" range
    % used by the range-query assertions in compare_method_test/0.  The
    % previous "K~4..0B" format produced "BucketKnnnn" names which could
    % never match that range, making the range comparison vacuous.
    BNumber =
        lists:flatten(
            io_lib:format("~4..0B",
                            [BucketLow + leveled_rand:uniform(BRange)])),
    KNumber =
        lists:flatten(io_lib:format("K~4..0B", [leveled_rand:uniform(1000)])),
    {K, V} = {{o, "Bucket" ++ BNumber, "Key" ++ KNumber, null},
                {Seqn, {active, infinity}, null}},
    generate_randomkeys(Seqn + 1,
                        Count - 1,
                        [{K, V}|Acc],
                        BucketLow,
                        BRange).
|
|
|
|
|
|
|
|
|
2016-10-29 13:27:21 +01:00
|
|
|
compare_method_test() ->
    % Load 16 ledger caches of 2000 keys each into the L0 cache, tracking
    % the expected SQN and the approximate size as we go
    LoadFun =
        fun(_X, {LedgerSQN, L0Size, L0TreeList}) ->
            LM1 = generate_randomkeys(LedgerSQN + 1,
                                        2000, 1, 500),
            add_to_cache(L0Size,
                            {LM1,
                                LedgerSQN + 1,
                                LedgerSQN + 2000},
                            LedgerSQN,
                            L0TreeList)
        end,
    {SQN, Size, TreeList} =
        lists:foldl(LoadFun, {0, 0, []}, lists:seq(1, 16)),
    ?assertMatch(32000, SQN),
    ?assertMatch(true, Size =< 32000),

    TestList = leveled_tree:to_list(generate_randomkeys(1, 2000, 1, 800)),

    % A crude reference lookup - fold across the trees directly until a
    % match for the key is found
    FindKeyFun =
        fun(Key) ->
            fun(Tree, {Found, KV}) ->
                case Found of
                    true ->
                        {true, KV};
                    false ->
                        case leveled_tree:match(Key, Tree) of
                            none ->
                                {false, not_found};
                            {value, Value} ->
                                {true, {Key, Value}}
                        end
                end
            end
        end,

    % Check every test key with the reference method ...
    S0 = lists:foldl(fun({Key, _V}, Acc) ->
                            R0 = lists:foldr(FindKeyFun(Key),
                                                {false, not_found},
                                                TreeList),
                            [R0|Acc] end,
                        [],
                        TestList),

    % ... and with check_levelzero - the results must be identical
    PosList = lists:seq(1, length(TreeList)),
    S1 = lists:foldl(fun({Key, _V}, Acc) ->
                            R0 = check_levelzero(Key, PosList, TreeList),
                            [R0|Acc]
                        end,
                        [],
                        TestList),

    ?assertMatch(S0, S1),

    % Compare a crude range query (filter a full dump of the cache) against
    % merge_trees over the same range - sizes must agree
    StartKey = {o, "Bucket0100", null, null},
    EndKey = {o, "Bucket0200", null, null},
    SWa = os:timestamp(),
    FetchFun = fun(Slot) -> lists:nth(Slot, TreeList) end,
    DumpList = to_list(length(TreeList), FetchFun),
    Q0 = lists:foldl(fun({K, V}, Acc) ->
                            P = leveled_codec:endkey_passed(EndKey, K),
                            case {K, P} of
                                {K, false} when K >= StartKey ->
                                    [{K, V}|Acc];
                                _ ->
                                    Acc
                            end
                        end,
                        [],
                        DumpList),
    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, Q0), ?CACHE_TYPE),
    Sz0 = leveled_tree:tsize(Tree),
    io:format("Crude method took ~w microseconds resulting in tree of " ++
                    "size ~w~n",
                [timer:now_diff(os:timestamp(), SWa), Sz0]),
    SWb = os:timestamp(),
    Q1 = merge_trees(StartKey, EndKey, TreeList, leveled_tree:empty(?CACHE_TYPE)),
    Sz1 = length(Q1),
    io:format("Merge method took ~w microseconds resulting in tree of " ++
                    "size ~w~n",
                [timer:now_diff(os:timestamp(), SWb), Sz1]),
    ?assertMatch(Sz0, Sz1).
|
2016-10-29 01:06:00 +01:00
|
|
|
|
2017-07-02 22:23:02 +01:00
|
|
|
with_index_test_() ->
    % Extended timeout - otherwise this test may time out when run with
    % coverage enabled
    {timeout, 60, fun with_index_test2/0}.
|
|
|
|
|
|
|
|
with_index_test2() ->
    IndexPrepareFun =
        fun({K, _V}, Acc) ->
            prepare_for_index(Acc, leveled_codec:segment_hash(K))
        end,
    % Load 16 ledger caches, maintaining the L0 cache state, the L0 index,
    % and a merged source list of all the KV pairs loaded
    LoadFun =
        fun(_X, {{LedgerSQN, L0Size, L0TreeList}, L0Idx, SrcList}) ->
            LM1 = generate_randomkeys_aslist(LedgerSQN + 1, 2000, 1, 500),
            LM1Array = lists:foldl(IndexPrepareFun, new_index(), LM1),
            LM1SL = leveled_tree:from_orderedlist(lists:ukeysort(1, LM1), ?CACHE_TYPE),
            UpdL0Index = add_to_index(LM1Array, L0Idx, length(L0TreeList) + 1),
            R = add_to_cache(L0Size,
                                {LM1SL, LedgerSQN + 1, LedgerSQN + 2000},
                                LedgerSQN,
                                L0TreeList),
            {R, UpdL0Index, lists:ukeymerge(1, LM1, SrcList)}
        end,

    R0 = lists:foldl(LoadFun, {{0, 0, []}, new_index(), []}, lists:seq(1, 16)),

    {{SQN, Size, TreeList}, L0Index, SrcKVL} = R0,
    ?assertMatch(32000, SQN),
    ?assertMatch(true, Size =< 32000),

    % Every loaded key must be findable via the index plus the slot check
    CheckFun =
        fun({K, V}, {L0Idx, L0Cache}) ->
            H = leveled_codec:segment_hash(K),
            PosList = check_index(H, L0Idx),
            ?assertMatch({true, {K, V}},
                            check_slotlist(K, H, PosList, L0Cache)),
            {L0Idx, L0Cache}
        end,

    _R1 = lists:foldl(CheckFun, {L0Index, TreeList}, SrcKVL).
|
|
|
|
|
2016-10-29 01:06:00 +01:00
|
|
|
|
2017-07-31 20:20:39 +02:00
|
|
|
-endif.
|