Change so that type of cache is parameterised
The Tree doesn't seem to be better than the skiplist. Get ready to make this switchable
parent 4846272393
commit ed96d0ca7a
5 changed files with 48 additions and 30 deletions
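
A note on the pattern before the diffs: the change threads a new ?CACHE_TYPE macro (defined as tree in the shared header) from every leveled_tree call site into the leveled_tree module, so switching the ledger cache to a skiplist later should only require changing the macro and adding clauses that match on the new type atom. Below is a minimal sketch of that dispatch pattern, assuming a hypothetical skpl (skiplist) variant that is not part of this commit, with a deliberately simplified data layout.

%% Sketch only: the 'tree' clauses mirror the shape used by leveled_tree,
%% but the real module builds a gb_tree of skip blocks rather than a plain
%% gb_tree. The 'skpl' clauses are an assumption about a possible future
%% skiplist backend and are NOT part of this commit.
-module(cache_type_sketch).
-export([empty/1, from_orderedlist/2, tsize/1]).

empty(tree) ->
    {tree, 0, gb_trees:empty()};
empty(skpl) ->
    {skpl, 0, []}.

from_orderedlist(OrderedList, tree) ->
    %% OrderedList must have unique, ordered keys (e.g. via lists:ukeysort/2)
    {tree, length(OrderedList), gb_trees:from_orddict(OrderedList)};
from_orderedlist(OrderedList, skpl) ->
    {skpl, length(OrderedList), OrderedList}.

tsize({_Type, L, _Struct}) ->
    L.

With this shape, call sites only ever pass ?CACHE_TYPE, and adding a backend means adding clauses rather than touching every caller.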
@@ -15,6 +15,8 @@
 %% Inker key type used for tombstones
 -define(INKT_TOMB, tomb).
 
+-define(CACHE_TYPE, tree).
+
 -record(sft_options,
         {wait = true :: boolean(),
         expire_tombstones = false :: boolean(),

@@ -154,7 +154,7 @@
 -define(LONG_RUNNING, 80000).
 
 -record(ledger_cache, {mem :: ets:tab(),
-                        loader = leveled_tree:empty() :: tuple(),
+                        loader = leveled_tree:empty(?CACHE_TYPE) :: tuple(),
                         load_queue = [] :: list(),
                         index = leveled_pmem:new_index(), % array
                         min_sqn = infinity :: integer()|infinity,
@@ -478,7 +478,7 @@ push_ledgercache(Penciller, Cache) ->
 
 loadqueue_ledgercache(Cache) ->
     SL = lists:ukeysort(1, Cache#ledger_cache.load_queue),
-    T = leveled_tree:from_orderedlist(SL),
+    T = leveled_tree:from_orderedlist(SL, ?CACHE_TYPE),
     Cache#ledger_cache{load_queue = [], loader = T}.
 
 %%%============================================================================
@@ -726,7 +726,8 @@ snapshot_store(State, SnapType) ->
 
 readycache_forsnapshot(LedgerCache) ->
     % Need to convert the Ledger Cache away from using the ETS table
-    Tree = leveled_tree:from_orderedset(LedgerCache#ledger_cache.mem),
+    Tree = leveled_tree:from_orderedset(LedgerCache#ledger_cache.mem,
+                                            ?CACHE_TYPE),
     Idx = LedgerCache#ledger_cache.index,
     MinSQN = LedgerCache#ledger_cache.min_sqn,
     MaxSQN = LedgerCache#ledger_cache.max_sqn,
@@ -982,7 +983,7 @@ maybepush_ledgercache(MaxCacheSize, Cache, Penciller) ->
     TimeToPush = maybe_withjitter(CacheSize, MaxCacheSize),
     if
         TimeToPush ->
-            CacheToLoad = {leveled_tree:from_orderedset(Tab),
+            CacheToLoad = {leveled_tree:from_orderedset(Tab, ?CACHE_TYPE),
                             Cache#ledger_cache.index,
                             Cache#ledger_cache.min_sqn,
                             Cache#ledger_cache.max_sqn},

@@ -404,7 +404,7 @@ handle_call({fetch_keys, StartKey, EndKey, AccFun, InitAcc, MaxKeys},
                 leveled_pmem:merge_trees(StartKey,
                                             EndKey,
                                             State#state.levelzero_cache,
-                                            leveled_tree:empty());
+                                            leveled_tree:empty(?CACHE_TYPE));
             List ->
                 List
         end,
@@ -1084,7 +1084,7 @@ maybe_pause_push(PCL, KL) ->
                         {T0, I0, infinity, 0},
                         KL),
     SL = element(1, T1),
-    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, SL)),
+    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, SL), ?CACHE_TYPE),
     T2 = setelement(1, T1, Tree),
     case pcl_pushmem(PCL, T2) of
         returned ->
@@ -1330,7 +1330,7 @@ foldwithimm_simple_test() ->
     KL1A = [{{o, "Bucket1", "Key6", null}, {7, {active, infinity}, 0, null}},
             {{o, "Bucket1", "Key1", null}, {8, {active, infinity}, 0, null}},
             {{o, "Bucket1", "Key8", null}, {9, {active, infinity}, 0, null}}],
-    IMM2 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1A)),
+    IMM2 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1A), ?CACHE_TYPE),
     IMMiter = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                         {o, null, null, null},
                                         IMM2),
@@ -1356,7 +1356,7 @@ foldwithimm_simple_test() ->
                     {{o, "Bucket1", "Key5", null}, 2}], AccA),
 
     KL1B = [{{o, "Bucket1", "Key4", null}, {10, {active, infinity}, 0, null}}|KL1A],
-    IMM3 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1B)),
+    IMM3 = leveled_tree:from_orderedlist(lists:ukeysort(1, KL1B), ?CACHE_TYPE),
     IMMiterB = leveled_tree:match_range({o, "Bucket1", "Key1", null},
                                         {o, null, null, null},
                                         IMM3),
@@ -1374,7 +1374,7 @@ create_file_test() ->
     Filename = "../test/new_file.sst",
     ok = file:write_file(Filename, term_to_binary("hello")),
     KVL = lists:usort(generate_randomkeys(10000)),
-    Tree = leveled_tree:from_orderedlist(KVL),
+    Tree = leveled_tree:from_orderedlist(KVL, ?CACHE_TYPE),
     FetchFun = fun(Slot) -> lists:nth(Slot, [Tree]) end,
     {ok,
         SP,

@@ -188,7 +188,7 @@ generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
                                 [],
                                 BucketRangeLow,
                                 BucketRangeHigh),
-    leveled_tree:from_orderedlist(lists:ukeysort(1, KVL)).
+    leveled_tree:from_orderedlist(lists:ukeysort(1, KVL), ?CACHE_TYPE).
 
 generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
@@ -277,13 +277,13 @@ compare_method_test() ->
                         end,
                         [],
                         DumpList),
-    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, Q0)),
+    Tree = leveled_tree:from_orderedlist(lists:ukeysort(1, Q0), ?CACHE_TYPE),
     Sz0 = leveled_tree:tsize(Tree),
     io:format("Crude method took ~w microseconds resulting in tree of " ++
                 "size ~w~n",
                 [timer:now_diff(os:timestamp(), SWa), Sz0]),
     SWb = os:timestamp(),
-    Q1 = merge_trees(StartKey, EndKey, TreeList, leveled_tree:empty()),
+    Q1 = merge_trees(StartKey, EndKey, TreeList, leveled_tree:empty(?CACHE_TYPE)),
     Sz1 = length(Q1),
     io:format("Merge method took ~w microseconds resulting in tree of " ++
                 "size ~w~n",
@@ -300,7 +300,7 @@ with_index_test() ->
             fun(_X, {{LedgerSQN, L0Size, L0TreeList}, L0Idx, SrcList}) ->
                 LM1 = generate_randomkeys_aslist(LedgerSQN + 1, 2000, 1, 500),
                 LM1Array = lists:foldl(IndexPrepareFun, new_index(), LM1),
-                LM1SL = leveled_tree:from_orderedlist(lists:ukeysort(1, LM1)),
+                LM1SL = leveled_tree:from_orderedlist(lists:ukeysort(1, LM1), ?CACHE_TYPE),
                 UpdL0Index = add_to_index(LM1Array, L0Idx, length(L0TreeList) + 1),
                 R = add_to_cache(L0Size,
                                     {LM1SL, LedgerSQN + 1, LedgerSQN + 2000},

@@ -13,15 +13,17 @@
 -include("include/leveled.hrl").
 
 -export([
-        from_orderedlist/1,
-        from_orderedset/1,
+        from_orderedlist/2,
+        from_orderedset/2,
+        from_orderedlist/3,
+        from_orderedset/3,
         to_list/1,
         match_range/3,
         search_range/4,
         match/2,
         search/3,
         tsize/1,
-        empty/0
+        empty/1
         ]).
 
 -include_lib("eunit/include/eunit.hrl").
@@ -33,12 +35,18 @@
 %%% API
 %%%============================================================================
 
-from_orderedlist(OrderedList) ->
-    L = length(OrderedList),
-    {tree, L, from_orderedlist(OrderedList, [], L)}.
+from_orderedset(Table, tree) ->
+    from_orderedlist(ets:tab2list(Table), tree, ?SKIP_WIDTH).
 
-from_orderedset(Table) ->
-    from_orderedlist(ets:tab2list(Table)).
+from_orderedset(Table, tree, SkipWidth) ->
+    from_orderedlist(ets:tab2list(Table), tree, SkipWidth).
+
+from_orderedlist(OrderedList, tree) ->
+    from_orderedlist(OrderedList, tree, ?SKIP_WIDTH).
+
+from_orderedlist(OrderedList, tree, SkipWidth) ->
+    L = length(OrderedList),
+    {tree, L, from_orderedlist(OrderedList, [], L, SkipWidth)}.
 
 match(Key, {tree, _L, Tree}) ->
     Iter = tree_iterator_from(Key, Tree),
@@ -90,7 +98,7 @@ to_list({tree, _L, Tree}) ->
 tsize({tree, L, _Tree}) ->
     L.
 
-empty() ->
+empty(tree) ->
     {tree, 0, empty_tree()}.
 
 %%%============================================================================
@@ -98,13 +106,13 @@ empty() ->
 %%%============================================================================
 
 
-from_orderedlist([], TmpList, _L) ->
+from_orderedlist([], TmpList, _L, _SkipWidth) ->
     gb_trees:from_orddict(lists:reverse(TmpList));
-from_orderedlist(OrdList, TmpList, L) ->
-    SubLL = min(?SKIP_WIDTH, L),
+from_orderedlist(OrdList, TmpList, L, SkipWidth) ->
+    SubLL = min(SkipWidth, L),
     {Head, Tail} = lists:split(SubLL, OrdList),
     {LastK, _LastV} = lists:last(Head),
-    from_orderedlist(Tail, [{LastK, Head}|TmpList], L - SubLL).
+    from_orderedlist(Tail, [{LastK, Head}|TmpList], L - SubLL, SkipWidth).
 
 lookup_match(_Key, []) ->
     none;
@@ -256,7 +264,7 @@ tree_search_test() ->
                 {N * 4, N * 4 - 2}
         end,
     KL = lists:map(MapFun, lists:seq(1, 50)),
-    T = from_orderedlist(KL),
+    T = from_orderedlist(KL, tree),
 
     StartKeyFun = fun(V) -> V end,
 
@@ -276,20 +284,27 @@ tree_search_test() ->
 
 
 tree_test() ->
+    tree_test_by_width(8),
+    tree_test_by_width(16),
+    tree_test_by_width(32),
+    tree_test_by_width(4).
+
+tree_test_by_width(Width) ->
+    io:format(user, "~nTree test for width: ~w~n", [Width]),
     N = 4000,
     KL = lists:ukeysort(1, generate_randomkeys(1, N, 1, N div 5)),
 
     OS = ets:new(test, [ordered_set, private]),
     ets:insert(OS, KL),
     SWaETS = os:timestamp(),
-    Tree0 = from_orderedset(OS),
+    Tree0 = from_orderedset(OS, tree, Width),
     io:format(user, "Generating tree from ETS in ~w microseconds" ++
                 " of size ~w~n",
                 [timer:now_diff(os:timestamp(), SWaETS),
                     tsize(Tree0)]),
 
     SWaGSL = os:timestamp(),
-    Tree1 = from_orderedlist(KL),
+    Tree1 = from_orderedlist(KL, tree, Width),
    io:format(user, "Generating tree from orddict in ~w microseconds" ++
                 " of size ~w~n",
                 [timer:now_diff(os:timestamp(), SWaGSL),
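
As a usage note on the reworked leveled_tree API in the last file: from_orderedlist and from_orderedset now take the cache type (and optionally a skip width), and empty/1 replaces empty/0. A rough sketch of calling the new arities, assuming the module as changed above; the key/value terms are arbitrary examples.

%% ukeysort/2 gives the unique, ordered keys the builders expect.
KL = lists:ukeysort(1, [{K, K * K} || K <- lists:seq(1, 100)]),

%% New arities: type only (uses the default ?SKIP_WIDTH) ...
T0 = leveled_tree:from_orderedlist(KL, tree),
%% ... or type plus an explicit skip width, as tree_test_by_width/1 does.
T1 = leveled_tree:from_orderedlist(KL, tree, 4),

%% tsize/1 reports the element count carried in the {tree, L, _} tuple.
100 = leveled_tree:tsize(T0),
100 = leveled_tree:tsize(T1),

%% empty/0 is gone; callers now pass the type (?CACHE_TYPE at call sites).
Empty = leveled_tree:empty(tree),
0 = leveled_tree:tsize(Empty).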