%% -------- SST (Variant) ---------
%%
%% An FSM module intended to wrap a persisted, ordered view of Keys and Values
%%
%% The persisted view is built from a list (which may be created by merging
%% multiple lists). The list is built first, then the view is created in bulk.
%%
%% -------- Slots ---------
%%
%% The view is built from sublists referred to as slots. Each slot is up to 128
%% keys and values in size. Three strategies have been benchmarked for the
%% slot: a skiplist, a gb-tree, and four blocks of flat lists with an index.
%%
%% Skiplist:
%% build and serialise slot - 3233 microseconds
%% de-serialise and check * 128 - 14669 microseconds
%% flatten back to list - 164 microseconds
%%
%% GBTree:
%% build and serialise tree - 1433 microseconds
%% de-serialise and check * 128 - 15263 microseconds
%% flatten back to list - 175 microseconds
%%
%% Indexed Blocks:
%% build and serialise slot - 342 microseconds
%% de-serialise and check * 128 - 6746 microseconds
%% flatten back to list - 187 microseconds
%%
%% The negative side of using Indexed Blocks is the storage of the index. In
%% the original implementation this was stored on fadvised disk (the index in
%% this case was a rice-encoded view of which block the object is in). In this
%% implementation it is cached in memory - requiring 2-bytes per key to be kept
%% in memory.
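%% As a rough illustration of that overhead: a full file of ?MAX_SLOTS (256)
%% slots, each of ?LOOK_SLOTSIZE (128) keys, implies in the order of
%% 256 * 128 * 2 bytes = 64KB of cached index per file.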
%%
%% -------- Blooms ---------
%%
%% There is a bloom for each slot - based on two hashes and 8 bits per key.
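%% (For illustration, at 8 bits per key a full 128-key slot implies a bloom of
%% roughly 128 bytes.)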
%%
%% Hashing for blooms is a challenge, as the slot is a slice of an ordered
%% list of keys with a fixed format. It is likely that the keys may vary by
%% only one or two ascii characters, and there is a desire to avoid the
%% overhead of cryptographic hash functions that may be able to handle this.
%%
%% -------- Summary ---------
%%
%% Each file has a summary - which is the 128 keys at the top of each slot in
%% a skiplist, with some basic metadata about the slot stored as the value.
%%
%% The summary is stored separately to the slots (within the same file).
%%
%% -------- CRC Checks ---------
%%
%% Every attempt to either read a summary or a slot off disk will also include
%% a CRC check. If the CRC check fails non-presence is assumed (the data
%% within is assumed to be entirely lost). The data can be recovered by either
%% using a recoverable strategy in transaction log compaction, and triggering
%% the transaction log replay; or by using a higher level form of anti-entropy
%% (i.e. make Riak responsible).
-module(leveled_sst).

-behaviour(gen_fsm).

-include("include/leveled.hrl").

-define(MAX_SLOTS, 256).
-define(LOOK_SLOTSIZE, 128). % This is not configurable
-define(LOOK_BLOCKSIZE, {24, 32}).
-define(NOLOOK_SLOTSIZE, 256).
-define(NOLOOK_BLOCKSIZE, {56, 32}).
-define(COMPRESSION_LEVEL, 1).
-define(BINARY_SETTINGS, [{compressed, ?COMPRESSION_LEVEL}]).
% -define(LEVEL_BLOOM_BITS, [{0, 8}, {1, 10}, {2, 8}, {default, 6}]).
-define(MERGE_SCANWIDTH, 16).
-define(DISCARD_EXT, ".discarded").
-define(DELETE_TIMEOUT, 10000).
-define(TREE_TYPE, idxt).
-define(TREE_SIZE, 4).

-include_lib("eunit/include/eunit.hrl").

-export([init/1,
|
|
|
|
handle_sync_event/4,
|
|
|
|
handle_event/3,
|
|
|
|
handle_info/3,
|
|
|
|
terminate/3,
|
|
|
|
code_change/4,
|
2016-12-29 02:07:14 +00:00
|
|
|
starting/2,
|
2016-12-24 15:12:24 +00:00
|
|
|
starting/3,
|
2016-12-29 02:07:14 +00:00
|
|
|
reader/3,
|
|
|
|
delete_pending/2,
|
|
|
|
delete_pending/3]).
|
2016-12-24 15:12:24 +00:00
|
|
|
|
2017-03-09 21:23:09 +00:00
|
|
|
-export([sst_new/5,
|
|
|
|
sst_new/7,
|
|
|
|
sst_newlevelzero/6,
|
|
|
|
sst_open/2,
|
2016-12-24 15:12:24 +00:00
|
|
|
sst_get/2,
|
|
|
|
sst_get/3,
|
2016-12-28 15:48:04 +00:00
|
|
|
sst_getkvrange/4,
|
|
|
|
sst_getslots/2,
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_getmaxsequencenumber/1,
|
|
|
|
sst_setfordelete/2,
|
|
|
|
sst_clear/1,
|
|
|
|
sst_checkready/1,
|
|
|
|
sst_deleteconfirmed/1,
|
2016-12-24 15:12:24 +00:00
|
|
|
sst_close/1]).
|
|
|
|
|
2016-12-29 02:07:14 +00:00
|
|
|
-export([expand_list_by_pointer/3]).
|
2016-12-24 15:12:24 +00:00
|
|
|
|
|
|
|
|
2016-12-23 18:08:22 +00:00
|
|
|
-record(slot_index_value, {slot_id :: integer(),
|
|
|
|
start_position :: integer(),
|
2017-01-24 21:51:12 +00:00
|
|
|
length :: integer(),
|
|
|
|
bloom :: binary()}).
|
2016-12-23 18:08:22 +00:00
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
-record(summary, {first_key :: tuple(),
|
|
|
|
last_key :: tuple(),
|
2017-07-31 19:30:29 +02:00
|
|
|
index :: tuple() | undefined,
|
2016-12-29 02:07:14 +00:00
|
|
|
size :: integer(),
|
2017-01-03 09:12:41 +00:00
|
|
|
max_sqn :: integer()}).

%% yield_blockquery is used to determine if the work necessary to process a
%% range query beyond the fetching of the slot should be managed from within
%% this process, or should be handled by the calling process.
%% Handling within the calling process may lead to extra binary heap garbage -
%% see Issue 52. Handling within the SST process may lead to contention and
%% extra copying. Files at the top of the tree yield, those lower down don't.

-record(state, {summary,
|
2017-07-31 19:30:29 +02:00
|
|
|
handle :: file:fd() | undefined,
|
|
|
|
sst_timings :: tuple() | undefined,
|
|
|
|
penciller :: pid() | undefined,
|
2017-03-09 21:23:09 +00:00
|
|
|
root_path,
|
2016-12-24 15:12:24 +00:00
|
|
|
filename,
|
2017-03-21 11:03:29 +00:00
|
|
|
yield_blockquery = false :: boolean(),
|
2017-01-02 10:47:04 +00:00
|
|
|
blockindex_cache}).
|
2016-12-24 15:12:24 +00:00
|
|
|
|
|
|
|
|
2016-12-23 12:30:58 +00:00
|
|
|
%%%============================================================================
|
|
|
|
%%% API
|
|
|
|
%%%============================================================================
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_open(string(), string()) -> {ok, pid(), {tuple(), tuple()}}.
|
|
|
|
%% @doc
|
|
|
|
%% Open an SST file at a given path and filename. The first and last keys
|
|
|
|
%% are returned in response to the request - so that those keys can be used
|
|
|
|
%% in manifests to understand what range of keys are covered by the SST file.
|
|
|
|
%% All keys in the file should be between the first and last key in erlang
|
|
|
|
%% term order.
|
|
|
|
%%
|
|
|
|
%% The filename should include the file extension.
|
2017-03-09 21:23:09 +00:00
|
|
|
sst_open(RootPath, Filename) ->
|
2016-12-24 15:12:24 +00:00
|
|
|
{ok, Pid} = gen_fsm:start(?MODULE, [], []),
|
2017-03-09 21:23:09 +00:00
|
|
|
case gen_fsm:sync_send_event(Pid,
|
|
|
|
{sst_open, RootPath, Filename},
|
|
|
|
infinity) of
|
2016-12-24 15:12:24 +00:00
|
|
|
{ok, {SK, EK}} ->
|
|
|
|
{ok, Pid, {SK, EK}}
|
|
|
|
end.
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_new(string(), string(), integer(), list(), integer()) ->
|
|
|
|
{ok, pid(), {tuple(), tuple()}}.
|
|
|
|
%% @doc
|
|
|
|
%% Start a new SST file at the assigned level passing in a list of Key, Value
|
|
|
|
%% pairs. This should not be used for basement levels or unexpanded Key/Value
|
|
|
|
%% lists as merge_lists will not be called.
|
2017-03-09 21:23:09 +00:00
|
|
|
sst_new(RootPath, Filename, Level, KVList, MaxSQN) ->
|
2016-12-24 15:12:24 +00:00
|
|
|
{ok, Pid} = gen_fsm:start(?MODULE, [], []),
|
2017-03-13 23:51:48 +00:00
|
|
|
{[], [], SlotList, FK} = merge_lists(KVList),
|
2016-12-24 15:12:24 +00:00
|
|
|
case gen_fsm:sync_send_event(Pid,
|
2016-12-29 02:07:14 +00:00
|
|
|
{sst_new,
|
2017-03-09 21:23:09 +00:00
|
|
|
RootPath,
|
2016-12-29 02:07:14 +00:00
|
|
|
Filename,
|
|
|
|
Level,
|
2017-03-13 23:51:48 +00:00
|
|
|
{SlotList, FK},
|
2016-12-29 02:07:14 +00:00
|
|
|
MaxSQN},
|
2016-12-24 15:12:24 +00:00
|
|
|
infinity) of
|
|
|
|
{ok, {SK, EK}} ->
|
|
|
|
{ok, Pid, {SK, EK}}
|
|
|
|
end.
-spec sst_new(string(), string(), list(), list(),
|
|
|
|
boolean(), integer(), integer()) ->
|
|
|
|
empty|{ok, pid(), {{list(), list()}, tuple(), tuple()}}.
|
|
|
|
%% @doc
%% Start a new SST file at the assigned level passing in two lists of
%% {Key, Value} pairs to be merged. The merge_lists function will use the
%% IsBasement boolean to determine if expired keys or tombstones can be
%% deleted.
%%
%% The remainder of the lists is returned along with the StartKey and EndKey
%% so that the remainder can be used in the next file in the merge. It might
%% be that the merge_lists returns nothing (for example when a basement file is
%% all tombstones) - and the atom empty is returned in this case so that the
%% file is not added to the manifest.
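%%
%% Illustrative use at a basement level (a sketch - the variable names here
%% are examples only, not part of the API):
%%   case sst_new(RootPath, Filename, KVL1, KVL2, true, 5, MaxSQN) of
%%       empty ->
%%           no_file_written;
%%       {ok, Pid, {{Rem1, Rem2}, StartKey, EndKey}} ->
%%           %% Rem1 and Rem2 seed the next file in the merge
%%           {Pid, Rem1, Rem2, StartKey, EndKey}
%%   end.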
|
2017-03-10 20:43:37 +00:00
|
|
|
sst_new(RootPath, Filename, KVL1, KVL2, IsBasement, Level, MaxSQN) ->
|
2017-03-13 23:51:48 +00:00
|
|
|
{Rem1, Rem2, SlotList, FK} = merge_lists(KVL1, KVL2, {IsBasement, Level}),
|
2017-03-10 20:43:37 +00:00
|
|
|
case SlotList of
|
2017-03-14 00:17:09 +00:00
|
|
|
[] ->
|
2016-12-29 05:09:47 +00:00
|
|
|
empty;
|
|
|
|
_ ->
|
|
|
|
{ok, Pid} = gen_fsm:start(?MODULE, [], []),
|
|
|
|
case gen_fsm:sync_send_event(Pid,
|
|
|
|
{sst_new,
|
2017-03-09 21:23:09 +00:00
|
|
|
RootPath,
|
2016-12-29 05:09:47 +00:00
|
|
|
Filename,
|
|
|
|
Level,
|
2017-03-13 23:51:48 +00:00
|
|
|
{SlotList, FK},
|
2016-12-29 05:09:47 +00:00
|
|
|
MaxSQN},
|
|
|
|
infinity) of
|
|
|
|
{ok, {SK, EK}} ->
|
|
|
|
{ok, Pid, {{Rem1, Rem2}, SK, EK}}
|
|
|
|
end
|
2016-12-28 21:47:05 +00:00
|
|
|
end.
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_newlevelzero(string(), string(),
|
|
|
|
integer(), fun(), pid()|undefined, integer()) ->
|
|
|
|
{ok, pid(), noreply}.
|
|
|
|
%% @doc
|
|
|
|
%% Start a new file at level zero. At this level the file size is not fixed -
|
|
|
|
%% it will be as big as the input. Also the KVList is not passed in, it is
|
|
|
|
%% fetched slot by slot using the FetchFun
|
2017-03-09 21:23:09 +00:00
|
|
|
sst_newlevelzero(RootPath, Filename, Slots, FetchFun, Penciller, MaxSQN) ->
|
2016-12-28 21:47:05 +00:00
|
|
|
{ok, Pid} = gen_fsm:start(?MODULE, [], []),
|
|
|
|
gen_fsm:send_event(Pid,
|
|
|
|
{sst_newlevelzero,
|
2017-03-09 21:23:09 +00:00
|
|
|
RootPath,
|
2016-12-28 21:47:05 +00:00
|
|
|
Filename,
|
|
|
|
Slots,
|
|
|
|
FetchFun,
|
2016-12-29 02:07:14 +00:00
|
|
|
Penciller,
|
|
|
|
MaxSQN}),
|
2016-12-28 21:47:05 +00:00
|
|
|
{ok, Pid, noreply}.
|
2016-12-24 15:12:24 +00:00
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_get(pid(), tuple()) -> tuple()|not_present.
|
|
|
|
%% @doc
|
|
|
|
%% Return a Key, Value pair matching a Key or not_present if the Key is not in
|
|
|
|
%% the store. The magic_hash function is used to accelerate the seeking of
|
|
|
|
%% keys, sst_get/3 should be used directly if this has already been calculated
|
2016-12-24 15:12:24 +00:00
|
|
|
sst_get(Pid, LedgerKey) ->
|
|
|
|
sst_get(Pid, LedgerKey, leveled_codec:magic_hash(LedgerKey)).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_get(pid(), tuple(), integer()) -> tuple()|not_present.
|
|
|
|
%% @doc
|
|
|
|
%% Return a Key, Value pair matching a Key or not_present if the Key is not in
|
|
|
|
%% the store (with the magic hash precalculated).
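%%
%% Illustrative lookup (a sketch), precalculating the hash once so it can be
%% re-used when checking multiple files:
%%   Hash = leveled_codec:magic_hash(LedgerKey),
%%   case sst_get(Pid, LedgerKey, Hash) of
%%       not_present -> not_found;
%%       {LedgerKey, Value} -> {ok, Value}
%%   end.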
|
2016-12-24 15:12:24 +00:00
|
|
|
sst_get(Pid, LedgerKey, Hash) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, {get_kv, LedgerKey, Hash}, infinity).
|
2016-12-23 12:30:58 +00:00
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_getkvrange(pid(), tuple()|all, tuple()|all, integer()) -> list().
|
|
|
|
%% @doc
%% Get a range of {Key, Value} pairs as a list between StartKey and EndKey
%% (inclusive). The ScanWidth is the maximum size of the range, and a pointer
%% will be placed on the tail of the resulting list if results expand beyond
%% the Scan Width.
%%
%% To make the range open-ended (either at the start, the end or both) the
%% all atom can be used in place of the Key tuple.
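%%
%% Illustrative range query (a sketch): an open-ended scan over the whole
%% file, with any slots beyond the first four being returned as pointers on
%% the tail of the list:
%%   KVListAndPointers = sst_getkvrange(Pid, all, all, 4).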
|
2016-12-28 15:48:04 +00:00
|
|
|
sst_getkvrange(Pid, StartKey, EndKey, ScanWidth) ->
|
2017-03-21 11:03:29 +00:00
|
|
|
case gen_fsm:sync_send_event(Pid,
|
2017-03-17 10:47:20 +00:00
|
|
|
{get_kvrange, StartKey, EndKey, ScanWidth},
|
2017-03-21 11:03:29 +00:00
|
|
|
infinity) of
|
|
|
|
{yield, SlotsToFetchBinList, SlotsToPoint} ->
|
|
|
|
FetchFun =
|
|
|
|
fun({SlotBin, SK, EK}, Acc) ->
|
|
|
|
Acc ++ binaryslot_trimmedlist(SlotBin, SK, EK)
|
|
|
|
end,
|
|
|
|
lists:foldl(FetchFun, [], SlotsToFetchBinList) ++ SlotsToPoint;
|
|
|
|
Reply ->
|
|
|
|
Reply
|
|
|
|
end.
|
2016-12-28 15:48:04 +00:00
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_getslots(pid(), list()) -> list().
|
|
|
|
%% @doc
|
|
|
|
%% Get a list of slots by their ID. The slot will be converted from the binary
|
|
|
|
%% to term form outside of the FSM loop
|
2016-12-28 15:48:04 +00:00
|
|
|
sst_getslots(Pid, SlotList) ->
|
2017-03-17 10:43:34 +00:00
|
|
|
SlotBins = gen_fsm:sync_send_event(Pid, {get_slots, SlotList}, infinity),
|
|
|
|
FetchFun =
|
|
|
|
fun({SlotBin, SK, EK}, Acc) ->
|
|
|
|
Acc ++ binaryslot_trimmedlist(SlotBin, SK, EK)
|
|
|
|
end,
|
|
|
|
lists:foldl(FetchFun, [], SlotBins).
|
2016-12-28 21:47:05 +00:00
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_getmaxsequencenumber(pid()) -> integer().
|
|
|
|
%% @doc
|
|
|
|
%% Get the maximum sequence number for this SST file
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_getmaxsequencenumber(Pid) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, get_maxsequencenumber, infinity).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_setfordelete(pid(), pid()|false) -> ok.
|
|
|
|
%% @doc
|
|
|
|
%% If the SST is no longer in use in the active ledger it can be set for
|
|
|
|
%% delete. Once set for delete it will poll the Penciller pid to see if
|
|
|
|
%% it is yet safe to be deleted (i.e. because all snapshots which depend
|
|
|
|
%% on it have finished). No polling will be done if the Penciller pid
|
|
|
|
%% is 'false'
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_setfordelete(Pid, Penciller) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, {set_for_delete, Penciller}, infinity).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_clear(pid()) -> ok.
|
|
|
|
%% @doc
|
|
|
|
%% Force this file to be closed and deleted
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_clear(Pid) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, {set_for_delete, false}, infinity),
|
|
|
|
gen_fsm:sync_send_event(Pid, close, 1000).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_deleteconfirmed(pid()) -> ok.
|
|
|
|
%% @doc
|
|
|
|
%% Allows a penciller to confirm to an SST file that it can be cleared, as it
|
|
|
|
%% is no longer in use
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_deleteconfirmed(Pid) ->
|
|
|
|
gen_fsm:send_event(Pid, close).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_checkready(pid()) -> {ok, string(), tuple(), tuple()}.
|
|
|
|
%% @doc
|
|
|
|
%% If a file has been set to be built, check that it has been built. Returns
|
|
|
|
%% the filename and the {StartKey, EndKey} for the manifest.
|
2016-12-29 02:07:14 +00:00
|
|
|
sst_checkready(Pid) ->
|
|
|
|
%% Only used in test
|
|
|
|
gen_fsm:sync_send_event(Pid, background_complete, 100).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_close(pid()) -> ok.
|
|
|
|
%% @doc
|
|
|
|
%% Close the file
|
2016-12-24 15:12:24 +00:00
|
|
|
sst_close(Pid) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, close, 2000).
|
|
|
|
|
2017-05-18 12:29:56 +01:00
|
|
|
-spec sst_printtimings(pid()) -> ok.
|
|
|
|
%% @doc
|
|
|
|
%% The state of the FSM keeps track of timings of operations, and these can
%% be forced to be printed.
|
2016-12-28 15:48:04 +00:00
|
|
|
%% Used in unit tests to force the printing of timings
|
|
|
|
sst_printtimings(Pid) ->
|
|
|
|
gen_fsm:sync_send_event(Pid, print_timings, 1000).
|
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
|
|
|
|
%%%============================================================================
|
|
|
|
%%% gen_server callbacks
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
init([]) ->
|
|
|
|
{ok, starting, #state{}}.
|
|
|
|
|
2017-03-09 21:23:09 +00:00
|
|
|
starting({sst_open, RootPath, Filename}, _From, State) ->
|
|
|
|
UpdState = read_file(Filename, State#state{root_path=RootPath}),
|
2016-12-24 15:12:24 +00:00
|
|
|
Summary = UpdState#state.summary,
|
|
|
|
{reply,
|
|
|
|
{ok, {Summary#summary.first_key, Summary#summary.last_key}},
|
|
|
|
reader,
|
|
|
|
UpdState};
|
2017-03-13 23:51:48 +00:00
|
|
|
starting({sst_new, RootPath, Filename, Level, {SlotList, FirstKey}, MaxSQN},
|
|
|
|
_From, State) ->
|
2016-12-29 02:55:28 +00:00
|
|
|
SW = os:timestamp(),
|
2017-03-13 23:51:48 +00:00
|
|
|
{Length,
|
2017-01-02 10:47:04 +00:00
|
|
|
SlotIndex,
|
|
|
|
BlockIndex,
|
2017-03-10 20:43:37 +00:00
|
|
|
SlotsBin} = build_all_slots(SlotList),
|
2016-12-29 02:07:14 +00:00
|
|
|
SummaryBin = build_table_summary(SlotIndex,
|
|
|
|
Level,
|
|
|
|
FirstKey,
|
2017-01-02 10:47:04 +00:00
|
|
|
Length,
|
2016-12-29 02:07:14 +00:00
|
|
|
MaxSQN),
|
2017-03-09 21:23:09 +00:00
|
|
|
ActualFilename = write_file(RootPath, Filename, SummaryBin, SlotsBin),
|
2017-03-21 18:24:11 +00:00
|
|
|
YBQ = Level =< 2,
|
2017-03-21 11:03:29 +00:00
|
|
|
UpdState = read_file(ActualFilename,
|
|
|
|
State#state{root_path=RootPath,
|
|
|
|
yield_blockquery=YBQ}),
|
2016-12-24 15:12:24 +00:00
|
|
|
Summary = UpdState#state.summary,
|
2016-12-29 02:55:28 +00:00
|
|
|
leveled_log:log_timer("SST08",
|
|
|
|
[ActualFilename, Level, Summary#summary.max_sqn],
|
|
|
|
SW),
|
2016-12-24 15:12:24 +00:00
|
|
|
{reply,
|
|
|
|
{ok, {Summary#summary.first_key, Summary#summary.last_key}},
|
|
|
|
reader,
|
2017-01-02 10:47:04 +00:00
|
|
|
UpdState#state{blockindex_cache = BlockIndex}}.
|
2016-12-24 15:12:24 +00:00
|
|
|
|
2017-03-09 21:23:09 +00:00
|
|
|
starting({sst_newlevelzero, RootPath, Filename,
|
|
|
|
Slots, FetchFun, Penciller, MaxSQN}, State) ->
|
2016-12-29 02:55:28 +00:00
|
|
|
SW = os:timestamp(),
|
2016-12-28 21:47:05 +00:00
|
|
|
KVList = leveled_pmem:to_list(Slots, FetchFun),
|
2017-03-13 23:51:48 +00:00
|
|
|
{[], [], SlotList, FirstKey} = merge_lists(KVList),
|
|
|
|
{SlotCount,
|
2017-01-02 10:47:04 +00:00
|
|
|
SlotIndex,
|
|
|
|
BlockIndex,
|
2017-03-10 20:43:37 +00:00
|
|
|
SlotsBin} = build_all_slots(SlotList),
|
2016-12-29 02:07:14 +00:00
|
|
|
SummaryBin = build_table_summary(SlotIndex,
|
|
|
|
0,
|
|
|
|
FirstKey,
|
2017-03-10 20:43:37 +00:00
|
|
|
SlotCount,
|
2016-12-29 02:07:14 +00:00
|
|
|
MaxSQN),
|
2017-03-09 21:23:09 +00:00
|
|
|
ActualFilename = write_file(RootPath, Filename, SummaryBin, SlotsBin),
|
2017-03-21 11:03:29 +00:00
|
|
|
UpdState = read_file(ActualFilename,
|
|
|
|
State#state{root_path = RootPath,
|
|
|
|
yield_blockquery = true}),
|
2016-12-28 21:47:05 +00:00
|
|
|
Summary = UpdState#state.summary,
|
2016-12-29 02:55:28 +00:00
|
|
|
leveled_log:log_timer("SST08",
|
|
|
|
[ActualFilename, 0, Summary#summary.max_sqn],
|
|
|
|
SW),
|
2016-12-29 02:07:14 +00:00
|
|
|
case Penciller of
|
|
|
|
undefined ->
|
2017-01-02 10:47:04 +00:00
|
|
|
{next_state, reader, UpdState#state{blockindex_cache = BlockIndex}};
|
2016-12-29 02:07:14 +00:00
|
|
|
_ ->
|
|
|
|
leveled_penciller:pcl_confirml0complete(Penciller,
|
|
|
|
UpdState#state.filename,
|
|
|
|
Summary#summary.first_key,
|
|
|
|
Summary#summary.last_key),
|
2017-01-02 10:47:04 +00:00
|
|
|
{next_state, reader, UpdState#state{blockindex_cache = BlockIndex}}
|
2016-12-29 02:07:14 +00:00
|
|
|
end.
|
|
|
|
|
2016-12-28 21:47:05 +00:00
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
reader({get_kv, LedgerKey, Hash}, _From, State) ->
|
|
|
|
SW = os:timestamp(),
|
2017-01-03 15:26:44 +00:00
|
|
|
{Result, Stage, _SlotID, UpdState} = fetch(LedgerKey, Hash, State),
|
2016-12-24 15:12:24 +00:00
|
|
|
UpdTimings = leveled_log:sst_timing(State#state.sst_timings, SW, Stage),
|
2017-01-03 15:26:44 +00:00
|
|
|
{reply, Result, reader, UpdState#state{sst_timings = UpdTimings}};
|
2016-12-28 15:48:04 +00:00
|
|
|
reader({get_kvrange, StartKey, EndKey, ScanWidth}, _From, State) ->
|
2017-03-20 23:22:46 +00:00
|
|
|
{SlotsToFetchBinList, SlotsToPoint} = fetch_range(StartKey,
|
|
|
|
EndKey,
|
|
|
|
ScanWidth,
|
|
|
|
State),
|
2017-03-21 11:03:29 +00:00
|
|
|
case State#state.yield_blockquery of
|
|
|
|
true ->
|
|
|
|
{reply,
|
|
|
|
{yield, SlotsToFetchBinList, SlotsToPoint},
|
|
|
|
reader,
|
|
|
|
State};
|
|
|
|
false ->
|
|
|
|
FetchFun =
|
|
|
|
fun({SlotBin, SK, EK}, Acc) ->
|
|
|
|
Acc ++ binaryslot_trimmedlist(SlotBin, SK, EK)
|
|
|
|
end,
|
|
|
|
{reply,
|
|
|
|
lists:foldl(FetchFun, [], SlotsToFetchBinList) ++ SlotsToPoint,
|
|
|
|
reader,
|
|
|
|
State}
|
|
|
|
end;
|
2016-12-28 15:48:04 +00:00
|
|
|
reader({get_slots, SlotList}, _From, State) ->
|
2016-12-28 21:47:05 +00:00
|
|
|
SlotBins = read_slots(State#state.handle, SlotList),
|
2017-03-17 10:43:34 +00:00
|
|
|
{reply, SlotBins, reader, State};
|
2016-12-29 02:07:14 +00:00
|
|
|
reader(get_maxsequencenumber, _From, State) ->
|
|
|
|
Summary = State#state.summary,
|
|
|
|
{reply, Summary#summary.max_sqn, reader, State};
|
2016-12-28 15:48:04 +00:00
|
|
|
reader(print_timings, _From, State) ->
|
2017-03-16 08:43:18 +00:00
|
|
|
io:format(user, "~nTimings of ~w~n", [State#state.sst_timings]),
|
2016-12-28 15:48:04 +00:00
|
|
|
{reply, ok, reader, State#state{sst_timings = undefined}};
|
2016-12-29 02:07:14 +00:00
|
|
|
reader({set_for_delete, Penciller}, _From, State) ->
|
|
|
|
leveled_log:log("SST06", [State#state.filename]),
|
|
|
|
{reply,
|
|
|
|
ok,
|
|
|
|
delete_pending,
|
|
|
|
State#state{penciller=Penciller},
|
|
|
|
?DELETE_TIMEOUT};
|
|
|
|
reader(background_complete, _From, State) ->
|
|
|
|
Summary = State#state.summary,
|
|
|
|
{reply,
|
|
|
|
{ok,
|
|
|
|
State#state.filename,
|
|
|
|
Summary#summary.first_key,
|
|
|
|
Summary#summary.last_key},
|
|
|
|
reader,
|
|
|
|
State};
|
2016-12-24 17:48:31 +00:00
|
|
|
reader(close, _From, State) ->
|
|
|
|
ok = file:close(State#state.handle),
|
|
|
|
{stop, normal, ok, State}.
|
2016-12-24 15:12:24 +00:00
|
|
|
|
2016-12-29 02:07:14 +00:00
|
|
|
|
|
|
|
delete_pending({get_kv, LedgerKey, Hash}, _From, State) ->
|
2017-01-03 15:26:44 +00:00
|
|
|
{Result, _Stage, _SlotID, UpdState} = fetch(LedgerKey, Hash, State),
|
|
|
|
{reply, Result, delete_pending, UpdState, ?DELETE_TIMEOUT};
|
2016-12-29 02:07:14 +00:00
|
|
|
delete_pending({get_kvrange, StartKey, EndKey, ScanWidth}, _From, State) ->
|
2017-03-20 23:43:31 +00:00
|
|
|
{SlotsToFetchBinList, SlotsToPoint} = fetch_range(StartKey,
|
|
|
|
EndKey,
|
|
|
|
ScanWidth,
|
|
|
|
State),
|
2017-03-21 11:03:29 +00:00
|
|
|
% Always yield as about to clear and de-reference
|
2016-12-29 02:07:14 +00:00
|
|
|
{reply,
|
2017-03-21 11:03:29 +00:00
|
|
|
{yield, SlotsToFetchBinList, SlotsToPoint},
|
2016-12-29 02:40:09 +00:00
|
|
|
delete_pending,
|
|
|
|
State,
|
|
|
|
?DELETE_TIMEOUT};
|
|
|
|
delete_pending({get_slots, SlotList}, _From, State) ->
|
|
|
|
SlotBins = read_slots(State#state.handle, SlotList),
|
2017-03-17 10:43:34 +00:00
|
|
|
{reply, SlotBins, delete_pending, State, ?DELETE_TIMEOUT};
|
2016-12-29 02:07:14 +00:00
|
|
|
delete_pending(close, _From, State) ->
|
|
|
|
leveled_log:log("SST07", [State#state.filename]),
|
|
|
|
ok = file:close(State#state.handle),
|
2017-03-09 21:23:09 +00:00
|
|
|
ok = file:delete(filename:join(State#state.root_path,
|
|
|
|
State#state.filename)),
|
2016-12-29 02:07:14 +00:00
|
|
|
{stop, normal, ok, State}.
|
|
|
|
|
|
|
|
delete_pending(timeout, State) ->
|
|
|
|
ok = leveled_penciller:pcl_confirmdelete(State#state.penciller,
|
2017-01-17 11:18:58 +00:00
|
|
|
State#state.filename,
|
|
|
|
self()),
|
2017-04-05 09:16:01 +01:00
|
|
|
% If the next thing is another timeout - may be long-running snapshot, so
|
|
|
|
% back-off
|
|
|
|
{next_state, delete_pending, State, random:uniform(10) * ?DELETE_TIMEOUT};
|
2016-12-29 02:07:14 +00:00
|
|
|
delete_pending(close, State) ->
|
|
|
|
leveled_log:log("SST07", [State#state.filename]),
|
|
|
|
ok = file:close(State#state.handle),
|
2017-03-09 21:23:09 +00:00
|
|
|
ok = file:delete(filename:join(State#state.root_path,
|
|
|
|
State#state.filename)),
|
2016-12-29 02:07:14 +00:00
|
|
|
{stop, normal, State}.
|
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
handle_sync_event(_Msg, _From, StateName, State) ->
|
|
|
|
{reply, undefined, StateName, State}.
|
|
|
|
|
|
|
|
handle_event(_Msg, StateName, State) ->
|
|
|
|
{next_state, StateName, State}.
|
|
|
|
|
|
|
|
handle_info(_Msg, StateName, State) ->
|
|
|
|
{next_state, StateName, State}.
|
|
|
|
|
2017-03-09 21:23:09 +00:00
|
|
|
terminate(normal, delete_pending, _State) ->
|
|
|
|
ok;
|
2016-12-24 15:12:24 +00:00
|
|
|
terminate(Reason, _StateName, State) ->
|
|
|
|
leveled_log:log("SST04", [Reason, State#state.filename]).
|
|
|
|
|
|
|
|
code_change(_OldVsn, StateName, State, _Extra) ->
|
|
|
|
{ok, StateName, State}.
|
2016-12-23 12:30:58 +00:00
|
|
|
|
|
|
|
|
|
|
|
%%%============================================================================
|
|
|
|
%%% Internal Functions
|
|
|
|
%%%============================================================================
|
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
fetch(LedgerKey, Hash, State) ->
|
|
|
|
Summary = State#state.summary,
|
2017-01-03 09:12:41 +00:00
|
|
|
Slot = lookup_slot(LedgerKey, Summary#summary.index),
|
|
|
|
SlotID = Slot#slot_index_value.slot_id,
|
2017-01-24 21:51:12 +00:00
|
|
|
Bloom = Slot#slot_index_value.bloom,
|
|
|
|
case leveled_tinybloom:check_hash(Hash, Bloom) of
|
|
|
|
false ->
|
|
|
|
{not_present, tiny_bloom, SlotID, State};
|
|
|
|
true ->
|
|
|
|
CachedBlockIdx = array:get(SlotID - 1,
|
2017-01-02 10:47:04 +00:00
|
|
|
State#state.blockindex_cache),
|
2017-01-24 21:51:12 +00:00
|
|
|
case CachedBlockIdx of
|
|
|
|
none ->
|
2017-01-03 15:26:44 +00:00
|
|
|
SlotBin = read_slot(State#state.handle, Slot),
|
2017-03-07 20:19:11 +00:00
|
|
|
{Result,
|
|
|
|
BlockLengths,
|
|
|
|
BlockIdx} = binaryslot_get(SlotBin, LedgerKey, Hash),
|
2017-01-24 21:51:12 +00:00
|
|
|
BlockIndexCache = array:set(SlotID - 1,
|
2017-03-07 20:19:11 +00:00
|
|
|
<<BlockLengths/binary,
|
|
|
|
BlockIdx/binary>>,
|
2017-01-24 21:51:12 +00:00
|
|
|
State#state.blockindex_cache),
|
|
|
|
{Result,
|
|
|
|
slot_fetch,
|
|
|
|
Slot#slot_index_value.slot_id,
|
|
|
|
State#state{blockindex_cache = BlockIndexCache}};
|
2017-03-21 16:54:23 +00:00
|
|
|
<<BlockLengths:24/binary, BlockIdx/binary>> ->
|
2017-03-07 20:19:11 +00:00
|
|
|
PosList = find_pos(BlockIdx,
|
2017-01-24 21:51:12 +00:00
|
|
|
double_hash(Hash, LedgerKey),
|
|
|
|
[],
|
|
|
|
0),
|
|
|
|
case PosList of
|
|
|
|
[] ->
|
|
|
|
{not_present, slot_bloom, SlotID, State};
|
|
|
|
_ ->
|
2017-03-07 20:19:11 +00:00
|
|
|
Result = check_blocks(PosList,
|
|
|
|
State#state.handle,
|
|
|
|
Slot,
|
|
|
|
BlockLengths,
|
|
|
|
LedgerKey),
|
|
|
|
{Result, slot_fetch, SlotID, State}
|
2017-01-24 21:51:12 +00:00
|
|
|
end
|
|
|
|
end
|
2016-12-24 15:12:24 +00:00
|
|
|
end.
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2016-12-28 15:48:04 +00:00
|
|
|
fetch_range(StartKey, EndKey, ScanWidth, State) ->
|
|
|
|
Summary = State#state.summary,
|
|
|
|
Handle = State#state.handle,
|
2017-01-24 21:51:12 +00:00
|
|
|
{Slots, RTrim} = lookup_slots(StartKey, EndKey, Summary#summary.index),
|
2016-12-28 15:48:04 +00:00
|
|
|
Self = self(),
|
|
|
|
SL = length(Slots),
|
2017-03-15 11:27:46 +00:00
|
|
|
|
2016-12-28 15:48:04 +00:00
|
|
|
ExpandedSlots =
|
|
|
|
case SL of
|
|
|
|
1 ->
|
|
|
|
[Slot] = Slots,
|
2017-01-24 21:51:12 +00:00
|
|
|
case RTrim of
|
|
|
|
true ->
|
2016-12-28 15:48:04 +00:00
|
|
|
[{pointer, Self, Slot, StartKey, EndKey}];
|
2017-01-24 21:51:12 +00:00
|
|
|
false ->
|
|
|
|
[{pointer, Self, Slot, StartKey, all}]
|
2016-12-28 15:48:04 +00:00
|
|
|
end;
|
|
|
|
N ->
|
|
|
|
{LSlot, MidSlots, RSlot} =
|
|
|
|
case N of
|
|
|
|
2 ->
|
|
|
|
[Slot1, Slot2] = Slots,
|
|
|
|
{Slot1, [], Slot2};
|
|
|
|
N ->
|
|
|
|
[Slot1|_Rest] = Slots,
|
|
|
|
SlotN = lists:last(Slots),
|
|
|
|
{Slot1, lists:sublist(Slots, 2, N - 2), SlotN}
|
|
|
|
end,
|
|
|
|
MidSlotPointers = lists:map(fun(S) ->
|
|
|
|
{pointer, Self, S, all, all}
|
|
|
|
end,
|
|
|
|
MidSlots),
|
2017-01-24 21:51:12 +00:00
|
|
|
case RTrim of
|
|
|
|
true ->
|
2016-12-28 15:48:04 +00:00
|
|
|
[{pointer, Self, LSlot, StartKey, all}] ++
|
|
|
|
MidSlotPointers ++
|
|
|
|
[{pointer, Self, RSlot, all, EndKey}];
|
2017-01-24 21:51:12 +00:00
|
|
|
false ->
|
2016-12-28 15:48:04 +00:00
|
|
|
[{pointer, Self, LSlot, StartKey, all}] ++
|
|
|
|
MidSlotPointers ++
|
|
|
|
[{pointer, Self, RSlot, all, all}]
|
|
|
|
end
|
|
|
|
end,
|
|
|
|
{SlotsToFetch, SlotsToPoint} =
|
|
|
|
case ScanWidth of
|
|
|
|
SW when SW >= SL ->
|
|
|
|
{ExpandedSlots, []};
|
|
|
|
_ ->
|
|
|
|
lists:split(ScanWidth, ExpandedSlots)
|
|
|
|
end,
|
2017-01-02 10:47:04 +00:00
|
|
|
|
|
|
|
SlotsToFetchBinList = read_slots(Handle, SlotsToFetch),
|
2017-03-17 10:47:20 +00:00
|
|
|
{SlotsToFetchBinList, SlotsToPoint}.
|
2016-12-28 15:48:04 +00:00
|
|
|
|
2016-12-24 15:12:24 +00:00
|
|
|
|
2017-03-09 21:23:09 +00:00
|
|
|
write_file(RootPath, Filename, SummaryBin, SlotsBin) ->
|
2016-12-24 15:12:24 +00:00
|
|
|
SummaryLength = byte_size(SummaryBin),
|
|
|
|
SlotsLength = byte_size(SlotsBin),
|
2016-12-24 17:48:31 +00:00
|
|
|
{PendingName, FinalName} = generate_filenames(Filename),
|
2017-03-09 21:23:09 +00:00
|
|
|
ok = file:write_file(filename:join(RootPath, PendingName),
|
2016-12-29 02:31:10 +00:00
|
|
|
<<SlotsLength:32/integer,
|
|
|
|
SummaryLength:32/integer,
|
|
|
|
SlotsBin/binary,
|
|
|
|
SummaryBin/binary>>,
|
|
|
|
[raw]),
|
2017-03-09 21:23:09 +00:00
|
|
|
case filelib:is_file(filename:join(RootPath, FinalName)) of
|
2016-12-24 17:48:31 +00:00
|
|
|
true ->
|
2017-03-09 21:23:09 +00:00
|
|
|
AltName = filename:join(RootPath, filename:basename(FinalName))
|
2016-12-24 17:48:31 +00:00
|
|
|
++ ?DISCARD_EXT,
|
|
|
|
leveled_log:log("SST05", [FinalName, AltName]),
|
2017-03-09 21:23:09 +00:00
|
|
|
ok = file:rename(filename:join(RootPath, FinalName), AltName);
|
2016-12-24 17:48:31 +00:00
|
|
|
false ->
|
|
|
|
ok
|
|
|
|
end,
|
2017-03-09 21:23:09 +00:00
|
|
|
file:rename(filename:join(RootPath, PendingName),
|
|
|
|
filename:join(RootPath, FinalName)),
|
2016-12-24 17:48:31 +00:00
|
|
|
FinalName.
|
2016-12-24 15:12:24 +00:00
|
|
|
|
|
|
|
read_file(Filename, State) ->
|
2017-03-09 21:23:09 +00:00
|
|
|
{Handle, SummaryBin} = open_reader(filename:join(State#state.root_path,
|
|
|
|
Filename)),
|
2017-01-04 14:26:11 +00:00
|
|
|
{Summary, SlotList} = read_table_summary(SummaryBin),
|
2017-03-10 20:43:37 +00:00
|
|
|
BlockIndexCache = array:new([{size, Summary#summary.size},
|
|
|
|
{default, none}]),
|
2017-01-04 21:36:59 +00:00
|
|
|
UpdState = State#state{blockindex_cache = BlockIndexCache},
|
2017-01-04 14:26:11 +00:00
|
|
|
SlotIndex = from_list(SlotList),
|
|
|
|
UpdSummary = Summary#summary{index = SlotIndex},
|
2016-12-29 02:07:14 +00:00
|
|
|
leveled_log:log("SST03", [Filename,
|
|
|
|
Summary#summary.size,
|
|
|
|
Summary#summary.max_sqn]),
|
2017-01-02 10:47:04 +00:00
|
|
|
UpdState#state{summary = UpdSummary,
|
2017-01-04 14:26:11 +00:00
|
|
|
handle = Handle,
|
|
|
|
filename = Filename}.
|
2016-12-24 15:12:24 +00:00
|
|
|
|
|
|
|
open_reader(Filename) ->
|
|
|
|
{ok, Handle} = file:open(Filename, [binary, raw, read]),
|
2016-12-24 17:48:31 +00:00
|
|
|
{ok, Lengths} = file:pread(Handle, 0, 8),
|
2016-12-24 15:12:24 +00:00
|
|
|
<<SlotsLength:32/integer, SummaryLength:32/integer>> = Lengths,
|
2016-12-24 17:48:31 +00:00
|
|
|
{ok, SummaryBin} = file:pread(Handle, SlotsLength + 8, SummaryLength),
|
2016-12-24 15:12:24 +00:00
|
|
|
{Handle, SummaryBin}.
|
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
build_table_summary(SlotIndex, _Level, FirstKey, SlotCount, MaxSQN) ->
|
|
|
|
[{LastKey, _LastV}|_Rest] = SlotIndex,
|
2016-12-24 15:12:24 +00:00
|
|
|
Summary = #summary{first_key = FirstKey,
|
|
|
|
last_key = LastKey,
|
2017-03-10 20:43:37 +00:00
|
|
|
size = SlotCount,
|
2016-12-29 02:07:14 +00:00
|
|
|
max_sqn = MaxSQN},
|
2017-03-10 20:43:37 +00:00
|
|
|
SummBin = term_to_binary({Summary, lists:reverse(SlotIndex)},
|
2017-01-04 14:26:11 +00:00
|
|
|
?BINARY_SETTINGS),
|
2016-12-24 01:23:40 +00:00
|
|
|
SummCRC = erlang:crc32(SummBin),
|
|
|
|
<<SummCRC:32/integer, SummBin/binary>>.
|
|
|
|
|
|
|
|
read_table_summary(BinWithCheck) ->
|
|
|
|
<<SummCRC:32/integer, SummBin/binary>> = BinWithCheck,
|
|
|
|
CRCCheck = erlang:crc32(SummBin),
|
|
|
|
if
|
|
|
|
CRCCheck == SummCRC ->
|
2016-12-24 15:12:24 +00:00
|
|
|
            % If not, it might be possible to rebuild from all the slots
|
2016-12-24 01:23:40 +00:00
|
|
|
binary_to_term(SummBin)
|
|
|
|
end.
|
2016-12-24 00:41:50 +00:00
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
|
|
|
|
build_all_slots(SlotList) ->
|
|
|
|
SlotCount = length(SlotList),
|
|
|
|
BuildResponse = build_all_slots(SlotList,
|
2017-01-02 10:47:04 +00:00
|
|
|
8,
|
|
|
|
1,
|
|
|
|
[],
|
|
|
|
array:new([{size, SlotCount},
|
|
|
|
{default, none}]),
|
|
|
|
<<>>),
|
2017-01-03 18:20:28 +00:00
|
|
|
{SlotIndex, BlockIndex, SlotsBin} = BuildResponse,
|
2017-03-13 23:51:48 +00:00
|
|
|
{SlotCount, SlotIndex, BlockIndex, SlotsBin}.
|
2017-03-10 20:43:37 +00:00
|
|
|
|
2017-03-13 23:51:48 +00:00
|
|
|
build_all_slots([], _Pos, _SlotID,
|
|
|
|
SlotIdxAcc, BlockIdxAcc, SlotBinAcc) ->
|
2017-03-10 20:43:37 +00:00
|
|
|
{SlotIdxAcc, BlockIdxAcc, SlotBinAcc};
|
2017-03-13 23:51:48 +00:00
|
|
|
build_all_slots([SlotD|Rest], Pos, SlotID,
|
|
|
|
SlotIdxAcc, BlockIdxAcc, SlotBinAcc) ->
|
|
|
|
{BlockIdx, SlotBin, HashList, LastKey} = SlotD,
|
2017-01-02 10:47:04 +00:00
|
|
|
Length = byte_size(SlotBin),
|
2017-01-24 21:51:12 +00:00
|
|
|
Bloom = leveled_tinybloom:create_bloom(HashList),
|
2016-12-23 18:08:22 +00:00
|
|
|
SlotIndexV = #slot_index_value{slot_id = SlotID,
|
2017-01-02 10:47:04 +00:00
|
|
|
start_position = Pos,
|
2017-01-24 21:51:12 +00:00
|
|
|
length = Length,
|
|
|
|
bloom = Bloom},
|
2017-03-10 20:43:37 +00:00
|
|
|
build_all_slots(Rest,
|
2017-01-02 10:47:04 +00:00
|
|
|
Pos + Length,
|
2016-12-23 18:08:22 +00:00
|
|
|
SlotID + 1,
|
2017-03-10 20:43:37 +00:00
|
|
|
[{LastKey, SlotIndexV}|SlotIdxAcc],
|
|
|
|
array:set(SlotID - 1, BlockIdx, BlockIdxAcc),
|
|
|
|
<<SlotBinAcc/binary, SlotBin/binary>>).
|
2016-12-23 12:30:58 +00:00
|
|
|
|
|
|
|
|
2016-12-24 17:48:31 +00:00
|
|
|
generate_filenames(RootFilename) ->
|
|
|
|
Ext = filename:extension(RootFilename),
|
|
|
|
Components = filename:split(RootFilename),
|
|
|
|
case Ext of
|
|
|
|
[] ->
|
|
|
|
{filename:join(Components) ++ ".pnd",
|
|
|
|
filename:join(Components) ++ ".sst"};
|
|
|
|
Ext ->
|
|
|
|
DN = filename:dirname(RootFilename),
|
|
|
|
FP_NOEXT = filename:basename(RootFilename, Ext),
|
|
|
|
{filename:join(DN, FP_NOEXT) ++ ".pnd",
|
|
|
|
filename:join(DN, FP_NOEXT) ++ ".sst"}
|
|
|
|
end.
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-01-04 14:26:11 +00:00
|
|
|
%%%============================================================================
|
|
|
|
%%% SlotIndex Implementation
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
%% The Slot Index is stored as a flat (sorted) list of {Key, Slot} where Key
|
|
|
|
%% is the last key within the slot.
|
|
|
|
%%
|
2017-01-23 22:58:51 +00:00
|
|
|
%% This implementation of the SlotIndex uses leveled_tree
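%%
%% For example (a sketch only), a two-slot file would have a SlotIndex built
%% from an ordered list of the form:
%%   [{LastKeyInSlot1, #slot_index_value{slot_id = 1, ...}},
%%    {LastKeyInSlot2, #slot_index_value{slot_id = 2, ...}}]
%% which from_list/1 then converts into a leveled_tree for lookups.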
|
2017-01-04 14:26:11 +00:00
|
|
|
|
|
|
|
from_list(SlotList) ->
|
2017-01-23 22:58:51 +00:00
|
|
|
leveled_tree:from_orderedlist(SlotList, ?TREE_TYPE, ?TREE_SIZE).
|
2017-01-04 14:26:11 +00:00
|
|
|
|
2017-01-23 22:58:51 +00:00
|
|
|
lookup_slot(Key, Tree) ->
|
|
|
|
StartKeyFun =
|
|
|
|
fun(_V) ->
|
|
|
|
all
|
|
|
|
end,
|
|
|
|
% The penciller should never ask for presence out of range - so will
|
|
|
|
% always return a slot (As we don't compare to StartKey)
|
|
|
|
{_LK, Slot} = leveled_tree:search(Key, Tree, StartKeyFun),
|
2017-01-04 14:26:11 +00:00
|
|
|
Slot.
|
|
|
|
|
2017-01-23 22:58:51 +00:00
|
|
|
lookup_slots(StartKey, EndKey, Tree) ->
|
|
|
|
StartKeyFun =
|
|
|
|
fun(_V) ->
|
|
|
|
all
|
|
|
|
end,
|
|
|
|
MapFun =
|
|
|
|
fun({_LK, Slot}) ->
|
|
|
|
Slot
|
|
|
|
end,
|
|
|
|
SlotList = leveled_tree:search_range(StartKey, EndKey, Tree, StartKeyFun),
|
|
|
|
{EK, _EndSlot} = lists:last(SlotList),
|
2017-03-16 08:37:36 +00:00
|
|
|
{lists:map(MapFun, SlotList), not leveled_codec:endkey_passed(EK, EndKey)}.
|
2017-01-04 14:26:11 +00:00
|
|
|
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
%%%============================================================================
|
|
|
|
%%% Slot Implementation
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
%% Implementing a slot has gone through numerous iterations. One of the most
|
|
|
|
%% critical considerations has been the cost of the binary_to_term and
|
|
|
|
%% term_to_binary calls for different sizes of slots and different data types.
|
|
|
|
%%
|
2017-03-04 20:47:46 +00:00
|
|
|
%% Microbenchmarking indicated that flat lists were the fastest at sst build
|
|
|
|
%% time. However, the lists need scanning at query time - and so give longer
|
|
|
|
%% lookups. Bigger slots did better at term_to_binary time. However
|
|
|
|
%% binary_to_term is an often repeated task, and this is better with smaller
|
|
|
|
%% slots.
|
2017-01-02 10:47:04 +00:00
|
|
|
%%
|
|
|
|
%% The outcome has been to divide the slot into four small blocks to minimise
|
|
|
|
%% the binary_to_term time. A binary index is provided for the slot for all
|
|
|
|
%% Keys that are directly fetchable (i.e. standard keys not index keys).
|
|
|
|
%%
|
|
|
|
%% The division and use of a list saves about 100 microseconds per fetch when
|
|
|
|
%% compared to using a 128-member gb:tree.
|
|
|
|
%%
|
|
|
|
%% The binary index is cacheable and doubles as a not_present filter, as it is
%% based on a 15-bit hash (so 0.0039 fpr).
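%%
%% As a sketch of the assembled slot (see generate_binary_slot/2 below), the
%% serialised form is:
%%   <<CRC32:32, PosIndexLen:32, B1L:32, B2L:32, B3L:32, B4L:32, B5L:32,
%%     PosBinIndex/binary, B1/binary, B2/binary, B3/binary, B4/binary,
%%     B5/binary>>
%% and the false positive arithmetic is roughly 128 positions checked against
%% a 15-bit hash, i.e. 128 / 32768 = 0.0039 per lookup.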
|
|
|
|
|
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
generate_binary_slot(Lookup, KVL) ->
|
2017-01-02 10:47:04 +00:00
|
|
|
|
|
|
|
HashFoldFun =
|
2017-01-24 21:51:12 +00:00
|
|
|
fun({K, V}, {PosBinAcc, NoHashCount, HashAcc}) ->
|
2017-01-02 10:47:04 +00:00
|
|
|
|
|
|
|
{_SQN, H1} = leveled_codec:strip_to_seqnhashonly({K, V}),
|
|
|
|
case is_integer(H1) of
|
|
|
|
true ->
|
|
|
|
PosH1 = double_hash(H1, K),
|
|
|
|
case NoHashCount of
|
|
|
|
0 ->
|
2017-01-03 18:20:28 +00:00
|
|
|
{<<1:1/integer,
|
2017-01-02 10:47:04 +00:00
|
|
|
PosH1:15/integer,
|
|
|
|
PosBinAcc/binary>>,
|
2017-01-24 21:51:12 +00:00
|
|
|
0,
|
|
|
|
[H1|HashAcc]};
|
2017-01-02 10:47:04 +00:00
|
|
|
N ->
|
|
|
|
% The No Hash Count is an integer between 0 and 127
|
|
|
|
% and so at read time should count NHC + 1
|
|
|
|
NHC = N - 1,
|
2017-01-03 18:20:28 +00:00
|
|
|
{<<1:1/integer,
|
2017-01-02 10:47:04 +00:00
|
|
|
PosH1:15/integer,
|
|
|
|
0:1/integer,
|
|
|
|
NHC:7/integer,
|
|
|
|
PosBinAcc/binary>>,
|
2017-01-24 21:51:12 +00:00
|
|
|
0,
|
|
|
|
HashAcc}
|
2017-01-02 10:47:04 +00:00
|
|
|
end;
|
|
|
|
false ->
|
2017-01-24 21:51:12 +00:00
|
|
|
{PosBinAcc, NoHashCount + 1, HashAcc}
|
2017-01-02 10:47:04 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
end,
|
2017-03-10 20:43:37 +00:00
|
|
|
|
|
|
|
{HashL, PosBinIndex} =
|
|
|
|
case Lookup of
|
|
|
|
lookup ->
|
|
|
|
{PosBinIndex0,
|
|
|
|
NHC,
|
|
|
|
HashL0} = lists:foldr(HashFoldFun, {<<>>, 0, []}, KVL),
|
|
|
|
PosBinIndex1 =
|
|
|
|
case NHC of
|
|
|
|
0 ->
|
|
|
|
PosBinIndex0;
|
|
|
|
_ ->
|
|
|
|
N = NHC - 1,
|
|
|
|
<<0:1/integer, N:7/integer, PosBinIndex0/binary>>
|
|
|
|
end,
|
|
|
|
{HashL0, PosBinIndex1};
|
|
|
|
no_lookup ->
|
|
|
|
{[], <<0:1/integer, 127:7/integer>>}
|
2017-01-02 10:47:04 +00:00
|
|
|
end,
|
|
|
|
|
2017-03-21 16:54:23 +00:00
|
|
|
{SideBlockSize, MidBlockSize} =
|
2017-03-10 20:43:37 +00:00
|
|
|
case Lookup of
|
|
|
|
lookup ->
|
2017-03-21 16:54:23 +00:00
|
|
|
?LOOK_BLOCKSIZE;
|
2017-03-10 20:43:37 +00:00
|
|
|
no_lookup ->
|
2017-03-21 16:54:23 +00:00
|
|
|
?NOLOOK_BLOCKSIZE
|
2017-03-10 20:43:37 +00:00
|
|
|
end,
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-03-21 16:54:23 +00:00
|
|
|
{B1, B2, B3, B4, B5} =
|
2017-01-02 10:47:04 +00:00
|
|
|
case length(KVL) of
|
2017-03-21 16:54:23 +00:00
|
|
|
L when L =< SideBlockSize ->
|
2017-01-02 10:47:04 +00:00
|
|
|
{term_to_binary(KVL, ?BINARY_SETTINGS),
|
|
|
|
<<0:0>>,
|
|
|
|
<<0:0>>,
|
2017-03-21 16:54:23 +00:00
|
|
|
<<0:0>>,
|
2017-01-02 10:47:04 +00:00
|
|
|
<<0:0>>};
|
2017-03-21 16:54:23 +00:00
|
|
|
L when L =< 2 * SideBlockSize ->
|
|
|
|
{KVLA, KVLB} = lists:split(SideBlockSize, KVL),
|
2017-03-10 20:43:37 +00:00
|
|
|
{term_to_binary(KVLA, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLB, ?BINARY_SETTINGS),
|
2017-01-02 10:47:04 +00:00
|
|
|
<<0:0>>,
|
2017-03-21 16:54:23 +00:00
|
|
|
<<0:0>>,
|
|
|
|
<<0:0>>};
|
|
|
|
L when L =< (2 * SideBlockSize + MidBlockSize) ->
|
|
|
|
{KVLA, KVLB_Rest} = lists:split(SideBlockSize, KVL),
|
|
|
|
{KVLB, KVLC} = lists:split(SideBlockSize, KVLB_Rest),
|
|
|
|
{term_to_binary(KVLA, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLB, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLC, ?BINARY_SETTINGS),
|
|
|
|
<<0:0>>,
|
2017-01-02 10:47:04 +00:00
|
|
|
<<0:0>>};
|
2017-03-21 16:54:23 +00:00
|
|
|
L when L =< (3 * SideBlockSize + MidBlockSize) ->
|
|
|
|
{KVLA, KVLB_Rest} = lists:split(SideBlockSize, KVL),
|
|
|
|
{KVLB, KVLC_Rest} = lists:split(SideBlockSize, KVLB_Rest),
|
|
|
|
{KVLC, KVLD} = lists:split(MidBlockSize, KVLC_Rest),
|
2017-03-10 20:43:37 +00:00
|
|
|
{term_to_binary(KVLA, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLB, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLC, ?BINARY_SETTINGS),
|
2017-03-21 16:54:23 +00:00
|
|
|
term_to_binary(KVLD, ?BINARY_SETTINGS),
|
2017-01-02 10:47:04 +00:00
|
|
|
<<0:0>>};
|
2017-03-21 16:54:23 +00:00
|
|
|
L when L =< (4 * SideBlockSize + MidBlockSize) ->
|
|
|
|
{KVLA, KVLB_Rest} = lists:split(SideBlockSize, KVL),
|
|
|
|
{KVLB, KVLC_Rest} = lists:split(SideBlockSize, KVLB_Rest),
|
|
|
|
{KVLC, KVLD_Rest} = lists:split(MidBlockSize, KVLC_Rest),
|
|
|
|
{KVLD, KVLE} = lists:split(SideBlockSize, KVLD_Rest),
|
2017-03-10 20:43:37 +00:00
|
|
|
{term_to_binary(KVLA, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLB, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLC, ?BINARY_SETTINGS),
|
2017-03-21 16:54:23 +00:00
|
|
|
term_to_binary(KVLD, ?BINARY_SETTINGS),
|
|
|
|
term_to_binary(KVLE, ?BINARY_SETTINGS)}
|
2017-01-02 10:47:04 +00:00
|
|
|
end,
|
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
B1P = byte_size(PosBinIndex),
|
2017-01-02 10:47:04 +00:00
|
|
|
B1L = byte_size(B1),
|
|
|
|
B2L = byte_size(B2),
|
|
|
|
B3L = byte_size(B3),
|
|
|
|
B4L = byte_size(B4),
|
2017-03-21 16:54:23 +00:00
|
|
|
B5L = byte_size(B5),
|
2017-03-07 20:19:11 +00:00
|
|
|
Lengths = <<B1P:32/integer,
|
2017-01-02 10:47:04 +00:00
|
|
|
B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
2017-03-21 16:54:23 +00:00
|
|
|
B4L:32/integer,
|
|
|
|
B5L:32/integer>>,
|
2017-01-02 10:47:04 +00:00
|
|
|
SlotBin = <<Lengths/binary,
|
2017-03-10 20:43:37 +00:00
|
|
|
PosBinIndex/binary,
|
2017-03-21 16:54:23 +00:00
|
|
|
B1/binary, B2/binary, B3/binary, B4/binary, B5/binary>>,
|
2017-01-02 10:47:04 +00:00
|
|
|
CRC32 = erlang:crc32(SlotBin),
|
|
|
|
FullBin = <<CRC32:32/integer, SlotBin/binary>>,
|
2017-03-13 23:51:48 +00:00
|
|
|
|
|
|
|
{LastKey, _LV} = lists:last(KVL),
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-03-13 23:51:48 +00:00
|
|
|
{<<Lengths/binary, PosBinIndex/binary>>, FullBin, HashL, LastKey}.
|
2017-03-07 20:19:11 +00:00
|
|
|
|
|
|
|
|
|
|
|
check_blocks([], _Handle, _Slot, _BlockLengths, _LedgerKey) ->
|
|
|
|
not_present;
|
|
|
|
check_blocks([Pos|Rest], Handle, Slot, BlockLengths, LedgerKey) ->
|
|
|
|
{BlockNumber, BlockPos} = revert_position(Pos),
|
|
|
|
BlockBin = read_block(Handle, Slot, BlockLengths, BlockNumber),
|
|
|
|
BlockL = binary_to_term(BlockBin),
|
|
|
|
{K, V} = lists:nth(BlockPos, BlockL),
|
|
|
|
case K of
|
|
|
|
LedgerKey ->
|
|
|
|
{K, V};
|
|
|
|
_ ->
|
|
|
|
check_blocks(Rest, Handle, Slot, BlockLengths, LedgerKey)
|
|
|
|
end.
|
|
|
|
|
|
|
|
|
|
|
|
read_block(Handle, Slot, BlockLengths, BlockID) ->
|
|
|
|
{BlockPos, Offset, Length} = block_offsetandlength(BlockLengths, BlockID),
|
|
|
|
{ok, BlockBin} = file:pread(Handle,
|
|
|
|
Slot#slot_index_value.start_position
|
|
|
|
+ BlockPos
|
|
|
|
+ Offset
|
2017-03-21 16:54:23 +00:00
|
|
|
+ 28,
|
|
|
|
% 4-byte CRC, 4 byte pos, 5x4 byte lengths
|
2017-03-07 20:19:11 +00:00
|
|
|
Length),
|
|
|
|
BlockBin.
|
|
|
|
|
|
|
|
read_slot(Handle, Slot) ->
|
|
|
|
{ok, SlotBin} = file:pread(Handle,
|
|
|
|
Slot#slot_index_value.start_position,
|
|
|
|
Slot#slot_index_value.length),
|
|
|
|
SlotBin.
|
|
|
|
|
|
|
|
read_slots(Handle, SlotList) ->
|
|
|
|
PointerMapFun =
|
|
|
|
fun(Pointer) ->
|
|
|
|
{Slot, SK, EK} =
|
|
|
|
case Pointer of
|
|
|
|
{pointer, _Pid, Slot0, SK0, EK0} ->
|
|
|
|
{Slot0, SK0, EK0};
|
|
|
|
{pointer, Slot0, SK0, EK0} ->
|
|
|
|
{Slot0, SK0, EK0}
|
|
|
|
end,
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-03-07 20:19:11 +00:00
|
|
|
{Slot#slot_index_value.start_position,
|
|
|
|
Slot#slot_index_value.length,
|
|
|
|
SK,
|
|
|
|
EK}
|
|
|
|
end,
|
|
|
|
|
|
|
|
LengthList = lists:map(PointerMapFun, SlotList),
|
|
|
|
StartPos = element(1, lists:nth(1, LengthList)),
|
|
|
|
EndPos = element(1, lists:last(LengthList))
|
|
|
|
+ element(2, lists:last(LengthList)),
|
|
|
|
{ok, MultiSlotBin} = file:pread(Handle, StartPos, EndPos - StartPos),
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-03-07 20:19:11 +00:00
|
|
|
BinSplitMapFun =
|
|
|
|
fun({SP, L, SK, EK}) ->
|
|
|
|
Start = SP - StartPos,
|
|
|
|
<<_Pre:Start/binary,
|
|
|
|
SlotBin:L/binary,
|
|
|
|
_Post/binary>> = MultiSlotBin,
|
|
|
|
{SlotBin, SK, EK}
|
|
|
|
end,
|
|
|
|
|
|
|
|
lists:map(BinSplitMapFun, LengthList).
|
|
|
|
|
|
|
|
|
|
|
|
binaryslot_get(FullBin, Key, Hash) ->
|
2017-01-02 10:47:04 +00:00
|
|
|
case crc_check_slot(FullBin) of
|
2017-03-07 20:19:11 +00:00
|
|
|
{BlockLengths, Rest} ->
|
|
|
|
<<B1P:32/integer, _R/binary>> = BlockLengths,
|
|
|
|
<<PosBinIndex:B1P/binary, Blocks/binary>> = Rest,
|
|
|
|
PosList = find_pos(PosBinIndex,
|
|
|
|
double_hash(Hash, Key),
|
|
|
|
[],
|
|
|
|
0),
|
|
|
|
{fetch_value(PosList, BlockLengths, Blocks, Key),
|
|
|
|
BlockLengths,
|
|
|
|
PosBinIndex};
|
2017-01-02 10:47:04 +00:00
|
|
|
crc_wonky ->
|
2017-03-07 20:19:11 +00:00
|
|
|
{not_present,
|
|
|
|
none,
|
|
|
|
none}
|
2017-01-02 10:47:04 +00:00
|
|
|
end.
|
|
|
|
|
|
|
|
binaryslot_tolist(FullBin) ->
|
|
|
|
BlockFetchFun =
|
|
|
|
fun(Length, {Acc, Bin}) ->
|
|
|
|
case Length of
|
|
|
|
0 ->
|
|
|
|
{Acc, Bin};
|
|
|
|
_ ->
|
|
|
|
<<Block:Length/binary, Rest/binary>> = Bin,
|
|
|
|
{Acc ++ binary_to_term(Block), Rest}
|
|
|
|
end
|
|
|
|
end,
|
|
|
|
|
|
|
|
{Out, _Rem} =
|
|
|
|
case crc_check_slot(FullBin) of
|
2017-03-07 20:19:11 +00:00
|
|
|
{BlockLengths, RestBin} ->
|
|
|
|
<<B1P:32/integer,
|
|
|
|
B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
2017-03-21 16:54:23 +00:00
|
|
|
B4L:32/integer,
|
|
|
|
B5L:32/integer>> = BlockLengths,
|
2017-01-02 10:47:04 +00:00
|
|
|
<<_PosBinIndex:B1P/binary, Blocks/binary>> = RestBin,
|
2017-03-21 16:54:23 +00:00
|
|
|
lists:foldl(BlockFetchFun,
|
|
|
|
{[], Blocks},
|
|
|
|
[B1L, B2L, B3L, B4L, B5L]);
|
2017-01-02 10:47:04 +00:00
|
|
|
crc_wonky ->
|
|
|
|
{[], <<>>}
|
|
|
|
end,
|
|
|
|
Out.
|
|
|
|
|
|
|
|
|
|
|
|
binaryslot_trimmedlist(FullBin, all, all) ->
|
|
|
|
binaryslot_tolist(FullBin);
|
|
|
|
binaryslot_trimmedlist(FullBin, StartKey, EndKey) ->
|
|
|
|
LTrimFun = fun({K, _V}) -> K < StartKey end,
|
|
|
|
RTrimFun = fun({K, _V}) -> not leveled_codec:endkey_passed(EndKey, K) end,
|
2017-03-21 16:54:23 +00:00
|
|
|
|
|
|
|
    % It will be more efficient to check a subset of blocks. To work out
    % the best subset we always look in the middle block of 5, and based on
    % how the first and last keys of that middle block compare to the StartKey
    % and EndKey of the query, determine a subset of blocks to check.
    %
    % This isn't perfectly efficient, especially if the query overlaps Block2
    % and Block3 (as Block 1 will also be checked), but finessing this last
    % scenario is hard to do in concise code
|
|
|
|
BlocksToCheck =
|
|
|
|
case crc_check_slot(FullBin) of
|
|
|
|
{BlockLengths, RestBin} ->
|
|
|
|
<<B1P:32/integer,
|
|
|
|
B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
|
|
|
B4L:32/integer,
|
|
|
|
B5L:32/integer>> = BlockLengths,
|
|
|
|
<<_PosBinIndex:B1P/binary,
|
|
|
|
Block1:B1L/binary, Block2:B2L/binary,
|
|
|
|
MidBlock:B3L/binary,
|
|
|
|
Block4:B4L/binary, Block5:B5L/binary>> = RestBin,
|
|
|
|
case B3L of
|
|
|
|
0 ->
|
|
|
|
[Block1, Block2];
|
|
|
|
_ ->
|
|
|
|
MidBlockList = binary_to_term(MidBlock),
|
|
|
|
{MidFirst, _} = lists:nth(1, MidBlockList),
|
|
|
|
{MidLast, _} = lists:last(MidBlockList),
|
|
|
|
Split = {StartKey > MidLast,
|
|
|
|
StartKey >= MidFirst,
|
|
|
|
leveled_codec:endkey_passed(EndKey,
|
|
|
|
MidFirst),
|
|
|
|
leveled_codec:endkey_passed(EndKey,
|
|
|
|
MidLast)},
|
|
|
|
case Split of
|
|
|
|
{true, _, _, _} ->
|
|
|
|
[Block4, Block5];
|
|
|
|
{false, true, false, true} ->
|
|
|
|
[MidBlockList];
|
|
|
|
{false, true, false, false} ->
|
|
|
|
[MidBlockList, Block4, Block5];
|
|
|
|
{false, false, true, true} ->
|
|
|
|
[Block1, Block2];
|
|
|
|
{false, false, false, true} ->
|
|
|
|
[Block1, Block2, MidBlockList];
|
|
|
|
_ ->
|
|
|
|
[Block1, Block2, MidBlockList, Block4, Block5]
|
|
|
|
end
|
|
|
|
end;
|
|
|
|
crc_wonky ->
|
|
|
|
[]
|
|
|
|
end,
|
|
|
|
|
|
|
|
|
|
|
|
BlockCheckFun =
|
|
|
|
fun(Block, {Acc, Continue}) ->
|
|
|
|
case {Block, Continue} of
|
|
|
|
{<<>>, _} ->
|
|
|
|
{Acc, false};
|
2017-03-19 21:47:22 +00:00
|
|
|
{_, true} ->
|
2017-03-21 16:54:23 +00:00
|
|
|
BlockList =
|
|
|
|
case is_binary(Block) of
|
|
|
|
true ->
|
|
|
|
binary_to_term(Block);
|
|
|
|
false ->
|
|
|
|
Block
|
|
|
|
end,
|
2017-01-02 10:47:04 +00:00
|
|
|
{LastKey, _LV} = lists:last(BlockList),
|
2017-03-19 21:47:22 +00:00
|
|
|
case StartKey > LastKey of
|
|
|
|
true ->
|
2017-03-21 16:54:23 +00:00
|
|
|
{Acc, true};
|
2017-03-19 21:47:22 +00:00
|
|
|
false ->
|
2017-01-02 10:47:04 +00:00
|
|
|
{_LDrop, RKeep} = lists:splitwith(LTrimFun,
|
|
|
|
BlockList),
|
2017-03-19 21:47:22 +00:00
|
|
|
case leveled_codec:endkey_passed(EndKey, LastKey) of
|
|
|
|
true ->
|
|
|
|
{LKeep, _RDrop} = lists:splitwith(RTrimFun, RKeep),
|
2017-03-21 16:54:23 +00:00
|
|
|
{Acc ++ LKeep, false};
|
2017-03-19 21:47:22 +00:00
|
|
|
false ->
|
2017-03-21 16:54:23 +00:00
|
|
|
{Acc ++ RKeep, true}
|
2017-03-19 21:47:22 +00:00
|
|
|
end
|
|
|
|
end;
|
|
|
|
{_ , false} ->
|
2017-03-21 16:54:23 +00:00
|
|
|
{Acc, false}
|
2017-01-02 10:47:04 +00:00
|
|
|
end
|
|
|
|
end,
|
|
|
|
|
2017-03-21 16:54:23 +00:00
|
|
|
{Acc, _Continue} = lists:foldl(BlockCheckFun, {[], true}, BlocksToCheck),
|
|
|
|
Acc.
|
2017-01-02 10:47:04 +00:00
|
|
|
|
|
|
|
|
|
|
|
crc_check_slot(FullBin) ->
|
|
|
|
<<CRC32:32/integer, SlotBin/binary>> = FullBin,
|
|
|
|
case erlang:crc32(SlotBin) of
|
|
|
|
CRC32 ->
|
2017-03-21 16:54:23 +00:00
|
|
|
<<BlockLengths:24/binary, Rest/binary>> = SlotBin,
|
2017-03-07 20:19:11 +00:00
|
|
|
{BlockLengths, Rest};
|
2017-01-02 10:47:04 +00:00
|
|
|
_ ->
|
2017-01-02 18:54:19 +00:00
|
|
|
leveled_log:log("SST09", []),
|
2017-01-02 10:47:04 +00:00
|
|
|
crc_wonky
|
|
|
|
end.
|
|
|
|
|
2017-03-07 20:19:11 +00:00
|
|
|
block_offsetandlength(BlockLengths, BlockID) ->
|
2017-03-21 16:54:23 +00:00
|
|
|
<<BlocksPos:32/integer, BlockLengths0:20/binary>> = BlockLengths,
|
2017-03-07 20:19:11 +00:00
|
|
|
case BlockID of
|
|
|
|
1 ->
|
|
|
|
<<B1L:32/integer, _BR/binary>> = BlockLengths0,
|
|
|
|
{BlocksPos, 0, B1L};
|
|
|
|
2 ->
|
|
|
|
<<B1L:32/integer, B2L:32/integer, _BR/binary>> = BlockLengths0,
|
|
|
|
{BlocksPos, B1L, B2L};
|
|
|
|
3 ->
|
|
|
|
<<B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
|
|
|
_BR/binary>> = BlockLengths0,
|
|
|
|
{BlocksPos, B1L + B2L, B3L};
|
|
|
|
4 ->
|
|
|
|
<<B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
2017-03-21 16:54:23 +00:00
|
|
|
B4L:32/integer,
|
|
|
|
_BR/binary>> = BlockLengths0,
|
|
|
|
{BlocksPos, B1L + B2L + B3L, B4L};
|
|
|
|
5 ->
|
|
|
|
<<B1L:32/integer,
|
|
|
|
B2L:32/integer,
|
|
|
|
B3L:32/integer,
|
|
|
|
B4L:32/integer,
|
|
|
|
B5L:32/integer,
|
|
|
|
_BR/binary>> = BlockLengths0,
|
|
|
|
{BlocksPos, B1L + B2L + B3L + B4L, B5L}
|
2017-03-07 20:19:11 +00:00
|
|
|
end.
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
double_hash(Hash, Key) ->
|
|
|
|
H2 = erlang:phash2(Key),
|
|
|
|
(Hash bxor H2) band 32767.
|
|
|
|
|
2017-03-07 20:19:11 +00:00
|
|
|
fetch_value([], _BlockLengths, _Blocks, _Key) ->
|
2017-01-02 10:47:04 +00:00
|
|
|
not_present;
|
2017-03-07 20:19:11 +00:00
|
|
|
fetch_value([Pos|Rest], BlockLengths, Blocks, Key) ->
|
|
|
|
{BlockNumber, BlockPos} = revert_position(Pos),
|
|
|
|
{_BlockPos,
|
|
|
|
Offset,
|
|
|
|
Length} = block_offsetandlength(BlockLengths, BlockNumber),
|
|
|
|
<<_Pre:Offset/binary, Block:Length/binary, _Rest/binary>> = Blocks,
|
|
|
|
BlockL = binary_to_term(Block),
|
2017-01-02 10:47:04 +00:00
|
|
|
{K, V} = lists:nth(BlockPos, BlockL),
|
|
|
|
case K of
|
|
|
|
Key ->
|
|
|
|
{K, V};
|
2017-03-21 16:54:23 +00:00
|
|
|
_ ->
|
2017-03-07 20:19:11 +00:00
|
|
|
fetch_value(Rest, BlockLengths, Blocks, Key)
|
2017-01-02 10:47:04 +00:00
|
|
|
end.
|
|
|
|
|
2017-03-07 20:19:11 +00:00
|
|
|
|
|
|
|
revert_position(Pos) ->
|
2017-03-21 16:54:23 +00:00
|
|
|
{SideBlockSize, MidBlockSize} = ?LOOK_BLOCKSIZE,
|
|
|
|
case Pos < 2 * SideBlockSize of
|
|
|
|
true ->
|
|
|
|
{(Pos div SideBlockSize) + 1, (Pos rem SideBlockSize) + 1};
|
|
|
|
false ->
|
|
|
|
case Pos < (2 * SideBlockSize + MidBlockSize) of
|
|
|
|
true ->
|
|
|
|
{3, ((Pos - 2 * SideBlockSize) rem MidBlockSize) + 1};
|
|
|
|
false ->
|
|
|
|
TailPos = Pos - 2 * SideBlockSize - MidBlockSize,
|
|
|
|
{(TailPos div SideBlockSize) + 4,
|
|
|
|
(TailPos rem SideBlockSize) + 1}
|
|
|
|
end
|
|
|
|
end.
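%% Worked example (assuming ?LOOK_BLOCKSIZE = {24, 32}): position 0 maps to
%% block 1, entry 1; position 30 to block 2, entry 7; position 50 to block 3,
%% entry 3; and position 90 to block 4, entry 11 (entries are 1-based, as
%% they are passed to lists:nth/2).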
|
2017-03-07 20:19:11 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
find_pos(<<>>, _Hash, PosList, _Count) ->
|
|
|
|
PosList;
|
|
|
|
find_pos(<<1:1/integer, Hash:15/integer, T/binary>>, Hash, PosList, Count) ->
|
|
|
|
find_pos(T, Hash, PosList ++ [Count], Count + 1);
|
|
|
|
find_pos(<<1:1/integer, _Miss:15/integer, T/binary>>, Hash, PosList, Count) ->
|
|
|
|
find_pos(T, Hash, PosList, Count + 1);
|
|
|
|
find_pos(<<0:1/integer, NHC:7/integer, T/binary>>, Hash, PosList, Count) ->
|
|
|
|
find_pos(T, Hash, PosList, Count + NHC + 1).
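%% Illustrative decode (a sketch; positions are 0-based): an index of
%%   <<1:1, HashA:15, 0:1, 2:7, 1:1, HashB:15>>
%% describes a slot where position 0 carries HashA, positions 1 to 3 hold
%% non-hashed (e.g. index) keys, and position 4 carries HashB - so
%% find_pos(Index, HashB, [], 0) would return [4].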
|
|
|
|
|
|
|
|
|
|
|
|
|
2016-12-28 21:47:05 +00:00
|
|
|
%%%============================================================================
%%% Merge Functions
%%%============================================================================

%% The source lists are merged into lists of slots before the file is created
%% At Level zero, there will be a single source list - and this will always be
%% split into standard size slots
%%
%% At lower levels there will be two source lists and they will need to be
%% merged to ensure that the best conflicting answer survives and compactable
%% KV pairs are discarded.
%%
%% At lower levels slots can be larger if there are no lookup keys present in
%% the slot. This is to slow the growth of the manifest/number-of-files when
%% large numbers of index keys are present - as well as improving compression
%% ratios in the Ledger.
%%
%% The outcome of merge_lists/1 and merge_lists/3 should be a list of slots.
%% Each slot should be ordered by Key and be of the form {Flag, KVList}, where
%% Flag can either be lookup or no-lookup. The list of slots should also be
%% ordered by Key (i.e. the first key in the slot)
%%
%% For merging ...
%% Compare the keys at the head of each list, and either skip that "best" key
%% or identify it as the next key.
%%
%% The logic needs to change if the file is in the basement level, as keys with
%% expired timestamps need not be written at this level
%%
%% The best key is considered to be the lowest key in erlang term order. If
%% there are matching keys then the highest sequence number must be chosen and
%% any lower sequence numbers should be compacted out of existence
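%%
%% A minimal sketch of the pipeline described above (the key/value contents
%% are hypothetical; the return shape matches the tests at the foot of this
%% module):
%%
%%   {Rem1, Rem2, SlotList, FirstKey} =
%%       merge_lists(KVList1, KVList2, {IsBasement, TS}),
%%
%% form_slot/7 accumulates each slot as a {lookup | no_lookup, KVList} pair,
%% which is then serialised via generate_binary_slot/2 before being added to
%% SlotList. A merge in which every key is compacted away (e.g. a basement
%% merge of nothing but expired tombstones) returns {[], [], [], null}.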
|
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
merge_lists(KVList1) ->
|
2017-03-21 16:54:23 +00:00
|
|
|
SlotCount = length(KVList1) div ?LOOK_SLOTSIZE,
|
2017-03-13 23:51:48 +00:00
|
|
|
{[],
|
|
|
|
[],
|
|
|
|
split_lists(KVList1, [], SlotCount),
|
|
|
|
element(1, lists:nth(1, KVList1))}.
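%% merge_lists/1 is the Level-zero case described above: a single, already
%% merged source list is simply cut into ?LOOK_SLOTSIZE-sized lookup slots
%% (with any final partial slot kept), and there is never a remainder to hand
%% back - hence the {[], [], SlotList, FirstKey} shape.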
|
2017-03-10 20:43:37 +00:00
|
|
|
|
|
|
|
split_lists([], SlotLists, 0) ->
|
|
|
|
lists:reverse(SlotLists);
|
|
|
|
split_lists(LastPuff, SlotLists, 0) ->
|
2017-03-13 23:51:48 +00:00
|
|
|
SlotD = generate_binary_slot(lookup, LastPuff),
|
|
|
|
lists:reverse([SlotD|SlotLists]);
|
2017-03-10 20:43:37 +00:00
|
|
|
split_lists(KVList1, SlotLists, N) ->
|
2017-03-21 16:54:23 +00:00
|
|
|
{Slot, KVListRem} = lists:split(?LOOK_SLOTSIZE, KVList1),
|
2017-03-13 23:51:48 +00:00
|
|
|
SlotD = generate_binary_slot(lookup, Slot),
|
|
|
|
split_lists(KVListRem, [SlotD|SlotLists], N - 1).
|
2017-03-10 20:43:37 +00:00
|
|
|
|
|
|
|
merge_lists(KVList1, KVList2, LevelInfo) ->
|
2017-03-13 23:51:48 +00:00
|
|
|
merge_lists(KVList1, KVList2, LevelInfo, [], null, 0).
|
|
|
|
|
|
|
|
merge_lists(KVList1, KVList2, _LI, SlotList, FirstKey, ?MAX_SLOTS) ->
|
|
|
|
{KVList1, KVList2, lists:reverse(SlotList), FirstKey};
|
|
|
|
merge_lists([], [], _LI, SlotList, FirstKey, _SlotCount) ->
|
|
|
|
{[], [], lists:reverse(SlotList), FirstKey};
|
|
|
|
merge_lists(KVList1, KVList2, LI, SlotList, FirstKey, SlotCount) ->
|
|
|
|
{KVRem1, KVRem2, Slot, FK0} =
|
|
|
|
form_slot(KVList1, KVList2, LI, no_lookup, 0, [], FirstKey),
|
2017-03-10 20:43:37 +00:00
|
|
|
case Slot of
|
2017-03-11 12:41:30 +00:00
|
|
|
{_, []} ->
|
2017-03-13 23:51:48 +00:00
|
|
|
merge_lists(KVRem1,
|
|
|
|
KVRem2,
|
|
|
|
LI,
|
|
|
|
SlotList,
|
|
|
|
FK0,
|
|
|
|
SlotCount);
|
|
|
|
{Lookup, KVL} ->
|
|
|
|
SlotD = generate_binary_slot(Lookup, KVL),
|
|
|
|
merge_lists(KVRem1,
|
|
|
|
KVRem2,
|
|
|
|
LI,
|
|
|
|
[SlotD|SlotList],
|
|
|
|
FK0,
|
|
|
|
SlotCount + 1)
|
2017-03-10 20:43:37 +00:00
|
|
|
end.
|
|
|
|
|
2017-03-13 23:51:48 +00:00
|
|
|
form_slot([], [], _LI, Type, _Size, Slot, FK) ->
|
|
|
|
{[], [], {Type, lists:reverse(Slot)}, FK};
|
2017-03-21 16:54:23 +00:00
|
|
|
|
|
|
|
form_slot(KVList1, KVList2, _LI, lookup, ?LOOK_SLOTSIZE, Slot, FK) ->
|
2017-03-13 23:51:48 +00:00
|
|
|
{KVList1, KVList2, {lookup, lists:reverse(Slot)}, FK};
|
|
|
|
form_slot(KVList1, KVList2, _LI, no_lookup, ?NOLOOK_SLOTSIZE, Slot, FK) ->
|
|
|
|
{KVList1, KVList2, {no_lookup, lists:reverse(Slot)}, FK};
|
|
|
|
form_slot(KVList1, KVList2, {IsBasement, TS}, lookup, Size, Slot, FK) ->
|
|
|
|
case {key_dominates(KVList1, KVList2, {IsBasement, TS}), FK} of
|
|
|
|
{{{next_key, TopKV}, Rem1, Rem2}, _} ->
|
|
|
|
form_slot(Rem1,
|
|
|
|
Rem2,
|
|
|
|
{IsBasement, TS},
|
|
|
|
lookup,
|
|
|
|
Size + 1,
|
|
|
|
[TopKV|Slot],
|
|
|
|
FK);
|
|
|
|
{{skipped_key, Rem1, Rem2}, _} ->
|
|
|
|
form_slot(Rem1, Rem2, {IsBasement, TS}, lookup, Size, Slot, FK)
|
2017-03-10 20:43:37 +00:00
|
|
|
end;
|
2017-03-13 23:51:48 +00:00
|
|
|
form_slot(KVList1, KVList2, {IsBasement, TS}, no_lookup, Size, Slot, FK) ->
|
2017-03-10 20:43:37 +00:00
|
|
|
case key_dominates(KVList1, KVList2, {IsBasement, TS}) of
|
|
|
|
{{next_key, {TopK, TopV}}, Rem1, Rem2} ->
|
2017-03-13 23:51:48 +00:00
|
|
|
FK0 =
|
|
|
|
case FK of
|
|
|
|
null ->
|
|
|
|
TopK;
|
|
|
|
_ ->
|
|
|
|
FK
|
|
|
|
end,
|
2017-03-10 20:43:37 +00:00
|
|
|
case leveled_codec:to_lookup(TopK) of
|
|
|
|
no_lookup ->
|
|
|
|
form_slot(Rem1,
|
|
|
|
Rem2,
|
|
|
|
{IsBasement, TS},
|
|
|
|
no_lookup,
|
|
|
|
Size + 1,
|
2017-03-13 23:51:48 +00:00
|
|
|
[{TopK, TopV}|Slot],
|
|
|
|
FK0);
|
2017-03-10 20:43:37 +00:00
|
|
|
lookup ->
|
2017-03-21 16:54:23 +00:00
|
|
|
case Size >= ?LOOK_SLOTSIZE of
|
2017-03-10 20:43:37 +00:00
|
|
|
true ->
|
|
|
|
{KVList1,
|
|
|
|
KVList2,
|
2017-03-14 00:52:07 +00:00
|
|
|
{no_lookup, lists:reverse(Slot)},
|
|
|
|
FK};
|
2017-03-10 20:43:37 +00:00
|
|
|
false ->
|
|
|
|
form_slot(Rem1,
|
|
|
|
Rem2,
|
|
|
|
{IsBasement, TS},
|
|
|
|
lookup,
|
|
|
|
Size + 1,
|
2017-03-13 23:51:48 +00:00
|
|
|
[{TopK, TopV}|Slot],
|
|
|
|
FK0)
|
2017-03-10 20:43:37 +00:00
|
|
|
end
|
|
|
|
end;
|
|
|
|
{skipped_key, Rem1, Rem2} ->
|
2017-03-13 23:51:48 +00:00
|
|
|
form_slot(Rem1, Rem2, {IsBasement, TS}, no_lookup, Size, Slot, FK)
|
2016-12-28 21:47:05 +00:00
|
|
|
end.
|
|
|
|
|
|
|
|
key_dominates(KL1, KL2, Level) ->
|
|
|
|
key_dominates_expanded(maybe_expand_pointer(KL1),
|
|
|
|
maybe_expand_pointer(KL2),
|
|
|
|
Level).
|
|
|
|
|
|
|
|
key_dominates_expanded([H1|T1], [], Level) ->
|
|
|
|
case leveled_codec:maybe_reap_expiredkey(H1, Level) of
|
|
|
|
true ->
|
|
|
|
{skipped_key, T1, []};
|
|
|
|
false ->
|
|
|
|
{{next_key, H1}, T1, []}
|
|
|
|
end;
|
|
|
|
key_dominates_expanded([], [H2|T2], Level) ->
|
|
|
|
case leveled_codec:maybe_reap_expiredkey(H2, Level) of
|
|
|
|
true ->
|
|
|
|
{skipped_key, [], T2};
|
|
|
|
false ->
|
|
|
|
{{next_key, H2}, [], T2}
|
|
|
|
end;
|
|
|
|
key_dominates_expanded([H1|T1], [H2|T2], Level) ->
|
|
|
|
case leveled_codec:key_dominates(H1, H2) of
|
|
|
|
left_hand_first ->
|
|
|
|
case leveled_codec:maybe_reap_expiredkey(H1, Level) of
|
|
|
|
true ->
|
|
|
|
{skipped_key, T1, [H2|T2]};
|
|
|
|
false ->
|
|
|
|
{{next_key, H1}, T1, [H2|T2]}
|
|
|
|
end;
|
|
|
|
right_hand_first ->
|
|
|
|
case leveled_codec:maybe_reap_expiredkey(H2, Level) of
|
|
|
|
true ->
|
|
|
|
{skipped_key, [H1|T1], T2};
|
|
|
|
false ->
|
|
|
|
{{next_key, H2}, [H1|T1], T2}
|
|
|
|
end;
|
|
|
|
left_hand_dominant ->
|
|
|
|
{skipped_key, [H1|T1], T2};
|
|
|
|
right_hand_dominant ->
|
|
|
|
{skipped_key, T1, [H2|T2]}
|
|
|
|
end.
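%% key_dominates_expanded/3 relies on leveled_codec:key_dominates/2 returning
%% one of left_hand_first, right_hand_first, left_hand_dominant or
%% right_hand_dominant, and on leveled_codec:maybe_reap_expiredkey/2 to
%% decide whether a basement-level key with an expired TTL can be dropped
%% rather than returned as the next key.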
|
|
|
|
|
|
|
|
|
|
|
|
%% When a list is provided it may include a pointer to gain another batch of
%% entries from the same file, or a new batch of entries from another file
%%
%% The resultant list should include the Tail of any pointers added at the
%% end of the list
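%%
%% A sketch of the two pointer forms handled below (SSTPid and ManEntry stand
%% for whatever the caller supplies): {pointer, SSTPid, Slot, StartKey, all}
%% asks the same file for a further batch of slots, while
%% {next, ManEntry, StartKey} moves on to the file owned by the next manifest
%% entry. Both are expanded by expand_list_by_pointer/3 with a width of
%% ?MERGE_SCANWIDTH, and anything not expanded is carried forward at the tail
%% of the returned list.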
|
|
|
|
|
|
|
|
maybe_expand_pointer([]) ->
|
|
|
|
[];
|
2016-12-29 02:07:14 +00:00
|
|
|
maybe_expand_pointer([{pointer, SSTPid, Slot, StartKey, all}|Tail]) ->
|
|
|
|
expand_list_by_pointer({pointer, SSTPid, Slot, StartKey, all},
|
|
|
|
Tail,
|
|
|
|
?MERGE_SCANWIDTH);
|
2017-01-17 10:12:15 +00:00
|
|
|
maybe_expand_pointer([{next, ManEntry, StartKey}|Tail]) ->
|
|
|
|
expand_list_by_pointer({next, ManEntry, StartKey, all},
|
2016-12-29 02:07:14 +00:00
|
|
|
Tail,
|
|
|
|
?MERGE_SCANWIDTH);
|
|
|
|
maybe_expand_pointer(List) ->
|
|
|
|
List.
|
|
|
|
|
|
|
|
|
2016-12-29 10:46:12 +00:00
|
|
|
expand_list_by_pointer({pointer, SSTPid, Slot, StartKey, EndKey}, Tail, Width) ->
|
2016-12-28 21:47:05 +00:00
|
|
|
FoldFun =
|
|
|
|
fun(X, {Pointers, Remainder}) ->
|
|
|
|
case length(Pointers) of
|
2016-12-29 02:07:14 +00:00
|
|
|
L when L < Width ->
|
2016-12-28 21:47:05 +00:00
|
|
|
case X of
|
2016-12-29 02:07:14 +00:00
|
|
|
{pointer, SSTPid, S, SK, EK} ->
|
2016-12-28 21:47:05 +00:00
|
|
|
{Pointers ++ [{pointer, S, SK, EK}], Remainder};
|
|
|
|
_ ->
|
|
|
|
{Pointers, Remainder ++ [X]}
|
|
|
|
end;
|
|
|
|
_ ->
|
|
|
|
{Pointers, Remainder ++ [X]}
|
|
|
|
end
|
|
|
|
end,
|
2016-12-29 10:46:12 +00:00
|
|
|
InitAcc = {[{pointer, Slot, StartKey, EndKey}], []},
|
2016-12-28 21:47:05 +00:00
|
|
|
{AccPointers, AccTail} = lists:foldl(FoldFun, InitAcc, Tail),
|
2016-12-29 02:07:14 +00:00
|
|
|
ExpPointers = leveled_sst:sst_getslots(SSTPid, AccPointers),
|
2016-12-28 21:47:05 +00:00
|
|
|
lists:append(ExpPointers, AccTail);
|
2017-01-17 10:12:15 +00:00
|
|
|
expand_list_by_pointer({next, ManEntry, StartKey, EndKey}, Tail, Width) ->
|
|
|
|
SSTPid = ManEntry#manifest_entry.owner,
|
|
|
|
leveled_log:log("SST10", [SSTPid, is_process_alive(SSTPid)]),
|
2016-12-29 02:07:14 +00:00
|
|
|
ExpPointer = leveled_sst:sst_getkvrange(SSTPid, StartKey, EndKey, Width),
|
|
|
|
ExpPointer ++ Tail.
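%% For a slot pointer, the clause above gathers up to Width pointers from
%% Tail that reference the same SSTPid into a single sst_getslots/2 call;
%% everything else is kept back and appended after the expanded results. For
%% a {next, ...} pointer, a kv range is fetched from the SST owned by the
%% manifest entry.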
|
2016-12-28 21:47:05 +00:00
|
|
|
|
|
|
|
|
2016-12-24 17:48:31 +00:00
|
|
|
|
2016-12-23 12:30:58 +00:00
|
|
|
%%%============================================================================
|
|
|
|
%%% Test
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
-ifdef(TEST).
|
|
|
|
|
|
|
|
generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
|
|
|
|
generate_randomkeys(Seqn,
|
|
|
|
Count,
|
|
|
|
[],
|
|
|
|
BucketRangeLow,
|
|
|
|
BucketRangeHigh).
|
|
|
|
|
|
|
|
generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
|
|
|
|
Acc;
|
|
|
|
generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
|
2017-01-04 21:36:59 +00:00
|
|
|
BRand = random:uniform(BRange),
|
|
|
|
BNumber = string:right(integer_to_list(BucketLow + BRand), 4, $0),
|
2016-12-28 15:48:04 +00:00
|
|
|
KNumber = string:right(integer_to_list(random:uniform(1000)), 6, $0),
|
2017-06-27 16:25:09 +01:00
|
|
|
LK = leveled_codec:to_ledgerkey("Bucket" ++ BNumber, "Key" ++ KNumber, o),
|
|
|
|
Chunk = crypto:rand_bytes(64),
|
|
|
|
{_B, _K, MV, _H, _LMs} =
|
|
|
|
leveled_codec:generate_ledgerkv(LK, Seqn, Chunk, 64, infinity),
|
2016-12-23 12:30:58 +00:00
|
|
|
generate_randomkeys(Seqn + 1,
|
|
|
|
Count - 1,
|
2017-06-27 16:25:09 +01:00
|
|
|
[{LK, MV}|Acc],
|
2016-12-23 12:30:58 +00:00
|
|
|
BucketLow,
|
|
|
|
BRange).
|
|
|
|
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
generate_indexkeys(Count) ->
|
|
|
|
generate_indexkeys(Count, []).
|
|
|
|
|
|
|
|
generate_indexkeys(0, IndexList) ->
|
|
|
|
IndexList;
|
|
|
|
generate_indexkeys(Count, IndexList) ->
|
2017-03-15 11:27:46 +00:00
|
|
|
Changes = generate_indexkey(random:uniform(8000), Count),
|
2017-01-02 10:47:04 +00:00
|
|
|
generate_indexkeys(Count - 1, IndexList ++ Changes).
|
|
|
|
|
2017-03-15 11:27:46 +00:00
|
|
|
generate_indexkey(Term, Count) ->
|
|
|
|
IndexSpecs = [{add, "t1_int", Term}],
|
2017-06-30 10:03:36 +01:00
|
|
|
leveled_codec:idx_indexspecs(IndexSpecs,
|
|
|
|
"Bucket",
|
|
|
|
"Key" ++ integer_to_list(Count),
|
|
|
|
Count,
|
|
|
|
infinity).
|
2017-01-02 10:47:04 +00:00
|
|
|
|
2017-03-11 00:03:55 +00:00
|
|
|
form_slot_test() ->
|
|
|
|
% If a skip key happens, mustn't switch to loookup by accident as could be
|
|
|
|
% over the expected size
|
|
|
|
SkippingKV = {{o, "B1", "K9999", null}, {9999, tomb, 1234567, {}}},
|
|
|
|
Slot = [{{o, "B1", "K5", null}, {5, active, 99234567, {}}}],
|
|
|
|
R1 = form_slot([SkippingKV], [],
|
|
|
|
{true, 99999999},
|
|
|
|
no_lookup,
|
2017-03-21 16:54:23 +00:00
|
|
|
?LOOK_SLOTSIZE + 1,
|
2017-03-13 23:51:48 +00:00
|
|
|
Slot,
|
|
|
|
{o, "B1", "K5", null}),
|
|
|
|
?assertMatch({[], [], {no_lookup, Slot}, {o, "B1", "K5", null}}, R1).
|
2017-03-11 00:03:55 +00:00
|
|
|
|
2017-03-11 12:41:30 +00:00
|
|
|
merge_tombstonelist_test() ->
|
|
|
|
% Merge lists with nothing but tombstones
|
|
|
|
SkippingKV1 = {{o, "B1", "K9995", null}, {9995, tomb, 1234567, {}}},
|
|
|
|
SkippingKV2 = {{o, "B1", "K9996", null}, {9996, tomb, 1234567, {}}},
|
|
|
|
SkippingKV3 = {{o, "B1", "K9997", null}, {9997, tomb, 1234567, {}}},
|
|
|
|
SkippingKV4 = {{o, "B1", "K9998", null}, {9998, tomb, 1234567, {}}},
|
|
|
|
SkippingKV5 = {{o, "B1", "K9999", null}, {9999, tomb, 1234567, {}}},
|
|
|
|
R = merge_lists([SkippingKV1, SkippingKV3, SkippingKV5],
|
|
|
|
[SkippingKV2, SkippingKV4],
|
|
|
|
{true, 9999999}),
|
2017-03-14 00:17:09 +00:00
|
|
|
?assertMatch({[], [], [], null}, R).
|
2017-03-11 12:41:30 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
indexed_list_test() ->
|
|
|
|
io:format(user, "~nIndexed list timing test:~n", []),
|
2016-12-29 22:22:13 +00:00
|
|
|
N = 150,
|
|
|
|
KVL0 = lists:ukeysort(1, generate_randomkeys(1, N, 1, 4)),
|
|
|
|
KVL1 = lists:sublist(KVL0, 128),
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
SW0 = os:timestamp(),
|
|
|
|
|
2017-03-13 23:51:48 +00:00
|
|
|
{_PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(lookup, KVL1),
|
2016-12-29 14:11:05 +00:00
|
|
|
io:format(user,
|
2017-01-02 10:47:04 +00:00
|
|
|
"Indexed list created slot in ~w microseconds of size ~w~n",
|
|
|
|
[timer:now_diff(os:timestamp(), SW0), byte_size(FullBin)]),
|
|
|
|
|
2016-12-29 22:22:13 +00:00
|
|
|
{TestK1, TestV1} = lists:nth(20, KVL1),
|
|
|
|
MH1 = leveled_codec:magic_hash(TestK1),
|
|
|
|
{TestK2, TestV2} = lists:nth(40, KVL1),
|
|
|
|
MH2 = leveled_codec:magic_hash(TestK2),
|
|
|
|
{TestK3, TestV3} = lists:nth(60, KVL1),
|
|
|
|
MH3 = leveled_codec:magic_hash(TestK3),
|
|
|
|
{TestK4, TestV4} = lists:nth(80, KVL1),
|
|
|
|
MH4 = leveled_codec:magic_hash(TestK4),
|
|
|
|
{TestK5, TestV5} = lists:nth(100, KVL1),
|
|
|
|
MH5 = leveled_codec:magic_hash(TestK5),
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
test_binary_slot(FullBin, TestK1, MH1, {TestK1, TestV1}),
|
|
|
|
test_binary_slot(FullBin, TestK2, MH2, {TestK2, TestV2}),
|
|
|
|
test_binary_slot(FullBin, TestK3, MH3, {TestK3, TestV3}),
|
|
|
|
test_binary_slot(FullBin, TestK4, MH4, {TestK4, TestV4}),
|
|
|
|
test_binary_slot(FullBin, TestK5, MH5, {TestK5, TestV5}).
|
2016-12-29 22:22:13 +00:00
|
|
|
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
indexed_list_mixedkeys_test() ->
|
|
|
|
KVL0 = lists:ukeysort(1, generate_randomkeys(1, 50, 1, 4)),
|
|
|
|
KVL1 = lists:sublist(KVL0, 33),
|
|
|
|
Keys = lists:ukeysort(1, generate_indexkeys(60) ++ KVL1),
|
2016-12-29 22:22:13 +00:00
|
|
|
|
2017-03-13 23:51:48 +00:00
|
|
|
{_PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(lookup, Keys),
|
2016-12-29 22:22:13 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
{TestK1, TestV1} = lists:nth(4, KVL1),
|
|
|
|
MH1 = leveled_codec:magic_hash(TestK1),
|
|
|
|
{TestK2, TestV2} = lists:nth(8, KVL1),
|
|
|
|
MH2 = leveled_codec:magic_hash(TestK2),
|
|
|
|
{TestK3, TestV3} = lists:nth(12, KVL1),
|
|
|
|
MH3 = leveled_codec:magic_hash(TestK3),
|
|
|
|
{TestK4, TestV4} = lists:nth(16, KVL1),
|
|
|
|
MH4 = leveled_codec:magic_hash(TestK4),
|
|
|
|
{TestK5, TestV5} = lists:nth(20, KVL1),
|
|
|
|
MH5 = leveled_codec:magic_hash(TestK5),
|
2016-12-29 22:22:13 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
test_binary_slot(FullBin, TestK1, MH1, {TestK1, TestV1}),
|
|
|
|
test_binary_slot(FullBin, TestK2, MH2, {TestK2, TestV2}),
|
|
|
|
test_binary_slot(FullBin, TestK3, MH3, {TestK3, TestV3}),
|
|
|
|
test_binary_slot(FullBin, TestK4, MH4, {TestK4, TestV4}),
|
|
|
|
test_binary_slot(FullBin, TestK5, MH5, {TestK5, TestV5}).
|
|
|
|
|
2017-01-04 21:36:59 +00:00
|
|
|
indexed_list_mixedkeys2_test() ->
|
|
|
|
KVL0 = lists:ukeysort(1, generate_randomkeys(1, 50, 1, 4)),
|
|
|
|
KVL1 = lists:sublist(KVL0, 33),
|
|
|
|
IdxKeys1 = lists:ukeysort(1, generate_indexkeys(30)),
|
|
|
|
IdxKeys2 = lists:ukeysort(1, generate_indexkeys(30)),
|
|
|
|
% this isn't actually ordered correctly
|
|
|
|
Keys = IdxKeys1 ++ KVL1 ++ IdxKeys2,
|
2017-03-13 23:51:48 +00:00
|
|
|
{_PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(lookup, Keys),
|
2017-01-04 21:36:59 +00:00
|
|
|
lists:foreach(fun({K, V}) ->
|
|
|
|
MH = leveled_codec:magic_hash(K),
|
|
|
|
test_binary_slot(FullBin, K, MH, {K, V})
|
|
|
|
end,
|
|
|
|
KVL1).
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
indexed_list_allindexkeys_test() ->
|
|
|
|
Keys = lists:sublist(lists:ukeysort(1, generate_indexkeys(150)), 128),
|
2017-03-13 23:51:48 +00:00
|
|
|
{PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(lookup, Keys),
|
2017-03-21 16:54:23 +00:00
|
|
|
?assertMatch(<<_BL:24/binary, 127:8/integer>>, PosBinIndex1),
|
2017-01-02 10:47:04 +00:00
|
|
|
% SW = os:timestamp(),
|
|
|
|
BinToList = binaryslot_tolist(FullBin),
|
2016-12-29 22:22:13 +00:00
|
|
|
% io:format(user,
|
2017-01-02 10:47:04 +00:00
|
|
|
% "Indexed list flattened in ~w microseconds ~n",
|
|
|
|
% [timer:now_diff(os:timestamp(), SW)]),
|
|
|
|
?assertMatch(Keys, BinToList),
|
|
|
|
?assertMatch(Keys, binaryslot_trimmedlist(FullBin, all, all)).
|
|
|
|
|
2017-03-10 20:43:37 +00:00
|
|
|
indexed_list_allindexkeys_nolookup_test() ->
|
|
|
|
Keys = lists:sublist(lists:ukeysort(1, generate_indexkeys(1000)),
|
2017-03-21 16:54:23 +00:00
|
|
|
?NOLOOK_SLOTSIZE),
|
2017-03-13 23:51:48 +00:00
|
|
|
{PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(no_lookup, Keys),
|
2017-03-21 16:54:23 +00:00
|
|
|
?assertMatch(<<_BL:24/binary, 127:8/integer>>, PosBinIndex1),
|
2017-03-10 20:43:37 +00:00
|
|
|
% SW = os:timestamp(),
|
|
|
|
BinToList = binaryslot_tolist(FullBin),
|
|
|
|
% io:format(user,
|
|
|
|
% "Indexed list flattened in ~w microseconds ~n",
|
|
|
|
% [timer:now_diff(os:timestamp(), SW)]),
|
|
|
|
?assertMatch(Keys, BinToList),
|
|
|
|
?assertMatch(Keys, binaryslot_trimmedlist(FullBin, all, all)).
|
2017-01-02 10:47:04 +00:00
|
|
|
|
|
|
|
indexed_list_allindexkeys_trimmed_test() ->
|
|
|
|
Keys = lists:sublist(lists:ukeysort(1, generate_indexkeys(150)), 128),
|
2017-03-13 23:51:48 +00:00
|
|
|
{PosBinIndex1, FullBin, _HL, _LK} = generate_binary_slot(lookup, Keys),
|
2017-03-21 16:54:23 +00:00
|
|
|
?assertMatch(<<_BL:24/binary, 127:8/integer>>, PosBinIndex1),
|
2017-01-02 10:47:04 +00:00
|
|
|
?assertMatch(Keys, binaryslot_trimmedlist(FullBin,
|
|
|
|
{i,
|
|
|
|
"Bucket",
|
|
|
|
{"t1_int", 0},
|
|
|
|
null},
|
|
|
|
{i,
|
|
|
|
"Bucket",
|
|
|
|
{"t1_int", 99999},
|
|
|
|
null})),
|
2016-12-29 14:14:09 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
{SK1, _} = lists:nth(10, Keys),
|
|
|
|
{EK1, _} = lists:nth(100, Keys),
|
|
|
|
R1 = lists:sublist(Keys, 10, 91),
|
|
|
|
O1 = binaryslot_trimmedlist(FullBin, SK1, EK1),
|
|
|
|
?assertMatch(91, length(O1)),
|
|
|
|
?assertMatch(R1, O1),
|
|
|
|
|
|
|
|
{SK2, _} = lists:nth(10, Keys),
|
|
|
|
{EK2, _} = lists:nth(20, Keys),
|
|
|
|
R2 = lists:sublist(Keys, 10, 11),
|
|
|
|
O2 = binaryslot_trimmedlist(FullBin, SK2, EK2),
|
|
|
|
?assertMatch(11, length(O2)),
|
|
|
|
?assertMatch(R2, O2),
|
|
|
|
|
|
|
|
{SK3, _} = lists:nth(127, Keys),
|
|
|
|
{EK3, _} = lists:nth(128, Keys),
|
|
|
|
R3 = lists:sublist(Keys, 127, 2),
|
|
|
|
O3 = binaryslot_trimmedlist(FullBin, SK3, EK3),
|
|
|
|
?assertMatch(2, length(O3)),
|
|
|
|
?assertMatch(R3, O3).
|
|
|
|
|
|
|
|
|
|
|
|
indexed_list_mixedkeys_bitflip_test() ->
|
|
|
|
KVL0 = lists:ukeysort(1, generate_randomkeys(1, 50, 1, 4)),
|
|
|
|
KVL1 = lists:sublist(KVL0, 33),
|
|
|
|
Keys = lists:ukeysort(1, generate_indexkeys(60) ++ KVL1),
|
2017-03-13 23:51:48 +00:00
|
|
|
{_PosBinIndex1, FullBin, _HL, LK} = generate_binary_slot(lookup, Keys),
|
|
|
|
?assertMatch(LK, element(1, lists:last(Keys))),
|
2017-01-02 10:47:04 +00:00
|
|
|
L = byte_size(FullBin),
|
|
|
|
Byte1 = random:uniform(L),
|
|
|
|
<<PreB1:Byte1/binary, A:8/integer, PostByte1/binary>> = FullBin,
|
|
|
|
FullBin0 =
|
|
|
|
case A of
|
|
|
|
0 ->
|
|
|
|
<<PreB1:Byte1/binary, 255:8/integer, PostByte1/binary>>;
|
|
|
|
_ ->
|
|
|
|
<<PreB1:Byte1/binary, 0:8/integer, PostByte1/binary>>
|
|
|
|
end,
|
|
|
|
|
|
|
|
{TestK1, _TestV1} = lists:nth(20, KVL1),
|
|
|
|
MH1 = leveled_codec:magic_hash(TestK1),
|
2016-12-29 14:14:09 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
test_binary_slot(FullBin0, TestK1, MH1, not_present),
|
|
|
|
ToList = binaryslot_tolist(FullBin0),
|
|
|
|
?assertMatch([], ToList),
|
2016-12-29 14:14:09 +00:00
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
{SK1, _} = lists:nth(10, Keys),
|
|
|
|
{EK1, _} = lists:nth(50, Keys),
|
|
|
|
O1 = binaryslot_trimmedlist(FullBin0, SK1, EK1),
|
|
|
|
?assertMatch(0, length(O1)),
|
|
|
|
?assertMatch([], O1).
|
2016-12-29 22:22:13 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
2017-01-02 10:47:04 +00:00
|
|
|
test_binary_slot(FullBin, Key, Hash, ExpectedValue) ->
|
|
|
|
% SW = os:timestamp(),
|
2017-03-07 20:19:11 +00:00
|
|
|
{ReturnedValue, _BLs, _Idx} = binaryslot_get(FullBin, Key, Hash),
|
2017-01-02 10:47:04 +00:00
|
|
|
?assertMatch(ExpectedValue, ReturnedValue).
|
|
|
|
% io:format(user, "Fetch success in ~w microseconds ~n",
|
|
|
|
% [timer:now_diff(os:timestamp(), SW)]).
|
2016-12-29 22:22:13 +00:00
|
|
|
|
2016-12-29 14:11:05 +00:00
|
|
|
|
|
|
|
|
2016-12-28 21:47:05 +00:00
|
|
|
merge_test() ->
|
|
|
|
N = 3000,
|
|
|
|
KVL1 = lists:ukeysort(1, generate_randomkeys(N + 1, N, 1, 20)),
|
|
|
|
KVL2 = lists:ukeysort(1, generate_randomkeys(1, N, 1, 20)),
|
|
|
|
KVL3 = lists:ukeymerge(1, KVL1, KVL2),
|
|
|
|
SW0 = os:timestamp(),
|
2017-03-09 21:23:09 +00:00
|
|
|
{ok, P1, {FK1, LK1}} = sst_new("../test/", "level1_src", 1, KVL1, 6000),
|
|
|
|
{ok, P2, {FK2, LK2}} = sst_new("../test/", "level2_src", 2, KVL2, 3000),
|
2016-12-28 21:47:05 +00:00
|
|
|
ExpFK1 = element(1, lists:nth(1, KVL1)),
|
|
|
|
ExpLK1 = element(1, lists:last(KVL1)),
|
|
|
|
ExpFK2 = element(1, lists:nth(1, KVL2)),
|
|
|
|
ExpLK2 = element(1, lists:last(KVL2)),
|
|
|
|
?assertMatch(ExpFK1, FK1),
|
|
|
|
?assertMatch(ExpFK2, FK2),
|
|
|
|
?assertMatch(ExpLK1, LK1),
|
|
|
|
?assertMatch(ExpLK2, LK2),
|
2017-01-17 10:14:40 +00:00
|
|
|
ML1 = [{next, #manifest_entry{owner = P1}, FK1}],
|
|
|
|
ML2 = [{next, #manifest_entry{owner = P2}, FK2}],
|
2017-03-10 20:43:37 +00:00
|
|
|
NewR = sst_new("../test/", "level2_merge", ML1, ML2, false, 2, N * 2),
|
|
|
|
{ok, P3, {{Rem1, Rem2}, FK3, LK3}} = NewR,
|
2016-12-28 21:47:05 +00:00
|
|
|
?assertMatch([], Rem1),
|
|
|
|
?assertMatch([], Rem2),
|
|
|
|
?assertMatch(true, FK3 == min(FK1, FK2)),
|
2017-03-10 20:43:37 +00:00
|
|
|
io:format("LK1 ~w LK2 ~w LK3 ~w~n", [LK1, LK2, LK3]),
|
2016-12-28 21:47:05 +00:00
|
|
|
?assertMatch(true, LK3 == max(LK1, LK2)),
|
|
|
|
io:format(user,
|
|
|
|
"Created and merged two files of size ~w in ~w microseconds~n",
|
|
|
|
[N, timer:now_diff(os:timestamp(), SW0)]),
|
|
|
|
|
|
|
|
SW1 = os:timestamp(),
|
|
|
|
lists:foreach(fun({K, V}) ->
|
|
|
|
?assertMatch({K, V}, sst_get(P3, K))
|
|
|
|
end,
|
|
|
|
KVL3),
|
|
|
|
io:format(user,
|
|
|
|
"Checked presence of all ~w objects in ~w microseconds~n",
|
|
|
|
[length(KVL3), timer:now_diff(os:timestamp(), SW1)]),
|
|
|
|
|
|
|
|
ok = sst_close(P1),
|
|
|
|
ok = sst_close(P2),
|
|
|
|
ok = sst_close(P3),
|
|
|
|
ok = file:delete("../test/level1_src.sst"),
|
|
|
|
ok = file:delete("../test/level2_src.sst"),
|
|
|
|
ok = file:delete("../test/level2_merge.sst").
|
2016-12-23 23:30:15 +00:00
|
|
|
|
2016-12-24 01:23:40 +00:00
|
|
|
|
2017-01-04 21:36:59 +00:00
|
|
|
simple_persisted_range_test() ->
|
2017-03-09 21:23:09 +00:00
|
|
|
{RP, Filename} = {"../test/", "simple_test"},
|
2017-03-21 16:54:23 +00:00
|
|
|
KVList0 = generate_randomkeys(1, ?LOOK_SLOTSIZE * 16, 1, 20),
|
2017-01-04 21:36:59 +00:00
|
|
|
KVList1 = lists:ukeysort(1, KVList0),
|
|
|
|
[{FirstKey, _FV}|_Rest] = KVList1,
|
|
|
|
{LastKey, _LV} = lists:last(KVList1),
|
2017-03-09 21:23:09 +00:00
|
|
|
{ok, Pid, {FirstKey, LastKey}} = sst_new(RP,
|
|
|
|
Filename,
|
2017-01-04 21:36:59 +00:00
|
|
|
1,
|
|
|
|
KVList1,
|
|
|
|
length(KVList1)),
|
|
|
|
|
|
|
|
{o, B, K, null} = LastKey,
|
|
|
|
SK1 = {o, B, K, 0},
|
|
|
|
EK1 = {o, B, K, 1},
|
|
|
|
FetchListA1 = sst_getkvrange(Pid, SK1, EK1, 1),
|
|
|
|
?assertMatch([], FetchListA1),
|
|
|
|
|
|
|
|
SK2 = element(1, lists:nth(127, KVList1)),
|
|
|
|
SK3 = element(1, lists:nth(128, KVList1)),
|
|
|
|
SK4 = element(1, lists:nth(129, KVList1)),
|
|
|
|
SK5 = element(1, lists:nth(130, KVList1)),
|
|
|
|
|
|
|
|
EK2 = element(1, lists:nth(255, KVList1)),
|
|
|
|
EK3 = element(1, lists:nth(256, KVList1)),
|
|
|
|
EK4 = element(1, lists:nth(257, KVList1)),
|
|
|
|
EK5 = element(1, lists:nth(258, KVList1)),
|
|
|
|
|
|
|
|
TestFun =
|
|
|
|
fun({SK, EK}) ->
|
|
|
|
FetchList = sst_getkvrange(Pid, SK, EK, 4),
|
|
|
|
?assertMatch(SK, element(1, lists:nth(1, FetchList))),
|
|
|
|
?assertMatch(EK, element(1, lists:last(FetchList)))
|
|
|
|
end,
|
|
|
|
|
|
|
|
TL2 = lists:map(fun(EK) -> {SK2, EK} end, [EK2, EK3, EK4, EK5]),
|
|
|
|
TL3 = lists:map(fun(EK) -> {SK3, EK} end, [EK2, EK3, EK4, EK5]),
|
|
|
|
TL4 = lists:map(fun(EK) -> {SK4, EK} end, [EK2, EK3, EK4, EK5]),
|
|
|
|
TL5 = lists:map(fun(EK) -> {SK5, EK} end, [EK2, EK3, EK4, EK5]),
|
|
|
|
lists:foreach(TestFun, TL2 ++ TL3 ++ TL4 ++ TL5).
|
2017-03-15 11:27:46 +00:00
|
|
|
|
|
|
|
additional_range_test() ->
|
|
|
|
% Test fetching ranges that fall into odd situations with regard to the
|
|
|
|
% summary index
|
|
|
|
% - ranges which fall between entries in summary
|
|
|
|
% - ranges which go beyond the end of the range of the sst
|
|
|
|
% - ranges which match to an end key in the summary index
|
|
|
|
IK1 = lists:foldl(fun(X, Acc) ->
|
|
|
|
Acc ++ generate_indexkey(X, X)
|
|
|
|
end,
|
|
|
|
[],
|
|
|
|
lists:seq(1, ?NOLOOK_SLOTSIZE)),
|
|
|
|
Gap = 2,
|
|
|
|
IK2 = lists:foldl(fun(X, Acc) ->
|
|
|
|
Acc ++ generate_indexkey(X, X)
|
|
|
|
end,
|
|
|
|
[],
|
|
|
|
lists:seq(?NOLOOK_SLOTSIZE + Gap + 1,
|
|
|
|
2 * ?NOLOOK_SLOTSIZE + Gap)),
|
|
|
|
{ok,
|
|
|
|
P1,
|
|
|
|
{{Rem1, Rem2},
|
|
|
|
SK,
|
|
|
|
EK}} = sst_new("../test/", "range1_src", IK1, IK2, false, 1, 9999),
|
|
|
|
?assertMatch([], Rem1),
|
|
|
|
?assertMatch([], Rem2),
|
|
|
|
?assertMatch(SK, element(1, lists:nth(1, IK1))),
|
|
|
|
?assertMatch(EK, element(1, lists:last(IK2))),
|
|
|
|
|
|
|
|
% Basic test - checking scanwidth
|
|
|
|
R1 = sst_getkvrange(P1, SK, EK, 1),
|
|
|
|
?assertMatch(?NOLOOK_SLOTSIZE + 1, length(R1)),
|
|
|
|
QR1 = lists:sublist(R1, ?NOLOOK_SLOTSIZE),
|
|
|
|
?assertMatch(IK1, QR1),
|
|
|
|
R2 = sst_getkvrange(P1, SK, EK, 2),
|
|
|
|
?assertMatch(?NOLOOK_SLOTSIZE * 2, length(R2)),
|
|
|
|
QR2 = lists:sublist(R2, ?NOLOOK_SLOTSIZE),
|
|
|
|
QR3 = lists:sublist(R2, ?NOLOOK_SLOTSIZE + 1, 2 * ?NOLOOK_SLOTSIZE),
|
|
|
|
?assertMatch(IK1, QR2),
|
|
|
|
?assertMatch(IK2, QR3),
|
|
|
|
|
|
|
|
% Testing the gap
|
|
|
|
[GapSKV] = generate_indexkey(?NOLOOK_SLOTSIZE + 1, ?NOLOOK_SLOTSIZE + 1),
|
|
|
|
[GapEKV] = generate_indexkey(?NOLOOK_SLOTSIZE + 2, ?NOLOOK_SLOTSIZE + 2),
|
|
|
|
R3 = sst_getkvrange(P1, element(1, GapSKV), element(1, GapEKV), 1),
|
|
|
|
?assertMatch([], R3),
|
|
|
|
|
|
|
|
% Testing beyond the range
|
|
|
|
[PastEKV] = generate_indexkey(2 * ?NOLOOK_SLOTSIZE + Gap + 1,
|
|
|
|
2 * ?NOLOOK_SLOTSIZE + Gap + 1),
|
|
|
|
R4 = sst_getkvrange(P1, element(1, GapSKV), element(1, PastEKV), 2),
|
|
|
|
?assertMatch(IK2, R4),
|
2017-03-15 16:40:43 +00:00
|
|
|
R5 = sst_getkvrange(P1, SK, element(1, PastEKV), 2),
|
|
|
|
IKAll = IK1 ++ IK2,
|
|
|
|
?assertMatch(IKAll, R5),
|
2017-03-16 08:37:36 +00:00
|
|
|
[MidREKV] = generate_indexkey(?NOLOOK_SLOTSIZE + Gap + 2,
|
|
|
|
?NOLOOK_SLOTSIZE + Gap + 2),
|
|
|
|
io:format(user, "Mid second range to past range test~n", []),
|
|
|
|
R6 = sst_getkvrange(P1, element(1, MidREKV), element(1, PastEKV), 2),
|
|
|
|
Exp6 = lists:sublist(IK2, 2, length(IK2)),
|
|
|
|
?assertMatch(Exp6, R6),
|
2017-03-15 11:27:46 +00:00
|
|
|
|
|
|
|
% Testing at a slot end
|
|
|
|
Slot1EK = element(1, lists:last(IK1)),
|
2017-03-16 08:37:36 +00:00
|
|
|
R7 = sst_getkvrange(P1, SK, Slot1EK, 2),
|
|
|
|
?assertMatch(IK1, R7).
|
2017-01-04 21:36:59 +00:00
|
|
|
|
2017-03-16 08:43:18 +00:00
|
|
|
% Testing beyond end (should never happen if manifest behaves)
|
|
|
|
% Test blows up anyway
|
|
|
|
% R8 = sst_getkvrange(P1, element(1, PastEKV), element(1, PastEKV), 2),
|
|
|
|
% ?assertMatch([], R8).
|
|
|
|
|
2017-03-19 23:42:24 +00:00
|
|
|
|
|
|
|
simple_persisted_slotsize_test() ->
|
|
|
|
{RP, Filename} = {"../test/", "simple_slotsize_test"},
|
2017-03-21 16:54:23 +00:00
|
|
|
KVList0 = generate_randomkeys(1, ?LOOK_SLOTSIZE * 2, 1, 20),
|
|
|
|
KVList1 = lists:sublist(lists:ukeysort(1, KVList0),
|
|
|
|
?LOOK_SLOTSIZE),
|
2017-03-19 23:42:24 +00:00
|
|
|
[{FirstKey, _FV}|_Rest] = KVList1,
|
|
|
|
{LastKey, _LV} = lists:last(KVList1),
|
|
|
|
{ok, Pid, {FirstKey, LastKey}} = sst_new(RP,
|
|
|
|
Filename,
|
|
|
|
1,
|
|
|
|
KVList1,
|
|
|
|
length(KVList1)),
|
|
|
|
lists:foreach(fun({K, V}) ->
|
|
|
|
?assertMatch({K, V}, sst_get(Pid, K))
|
|
|
|
end,
|
|
|
|
KVList1),
|
|
|
|
ok = sst_close(Pid),
|
|
|
|
ok = file:delete(filename:join(RP, Filename ++ ".sst")).
|
2017-01-04 21:36:59 +00:00
|
|
|
|
2016-12-24 17:48:31 +00:00
|
|
|
simple_persisted_test() ->
|
2017-03-09 21:23:09 +00:00
|
|
|
{RP, Filename} = {"../test/", "simple_test"},
|
2017-03-21 16:54:23 +00:00
|
|
|
KVList0 = generate_randomkeys(1, ?LOOK_SLOTSIZE * 32, 1, 20),
|
2016-12-24 17:48:31 +00:00
|
|
|
KVList1 = lists:ukeysort(1, KVList0),
|
|
|
|
[{FirstKey, _FV}|_Rest] = KVList1,
|
|
|
|
{LastKey, _LV} = lists:last(KVList1),
|
2017-03-09 21:23:09 +00:00
|
|
|
{ok, Pid, {FirstKey, LastKey}} = sst_new(RP,
|
|
|
|
Filename,
|
2016-12-29 02:07:14 +00:00
|
|
|
1,
|
|
|
|
KVList1,
|
|
|
|
length(KVList1)),
|
2017-01-03 13:03:59 +00:00
|
|
|
SW0 = os:timestamp(),
|
|
|
|
lists:foreach(fun({K, V}) ->
|
|
|
|
?assertMatch({K, V}, sst_get(Pid, K))
|
|
|
|
end,
|
|
|
|
KVList1),
|
|
|
|
io:format(user,
|
|
|
|
"Checking for ~w keys (once) in file with cache hit took ~w "
|
|
|
|
++ "microseconds~n",
|
|
|
|
[length(KVList1), timer:now_diff(os:timestamp(), SW0)]),
|
2016-12-24 17:59:07 +00:00
|
|
|
SW1 = os:timestamp(),
|
2016-12-24 17:48:31 +00:00
|
|
|
lists:foreach(fun({K, V}) ->
|
|
|
|
?assertMatch({K, V}, sst_get(Pid, K)),
|
|
|
|
?assertMatch({K, V}, sst_get(Pid, K))
|
|
|
|
end,
|
|
|
|
KVList1),
|
|
|
|
io:format(user,
|
|
|
|
"Checking for ~w keys (twice) in file with cache hit took ~w "
|
|
|
|
++ "microseconds~n",
|
2016-12-24 17:59:07 +00:00
|
|
|
[length(KVList1), timer:now_diff(os:timestamp(), SW1)]),
|
2016-12-28 15:48:04 +00:00
|
|
|
ok = sst_printtimings(Pid),
|
2017-03-21 16:54:23 +00:00
|
|
|
KVList2 = generate_randomkeys(1, ?LOOK_SLOTSIZE * 32, 1, 20),
|
2016-12-24 17:59:07 +00:00
|
|
|
MapFun =
|
|
|
|
fun({K, V}, Acc) ->
|
|
|
|
In = lists:keymember(K, 1, KVList1),
|
|
|
|
case {K > FirstKey, LastKey > K, In} of
|
|
|
|
{true, true, false} ->
|
2016-12-24 18:03:34 +00:00
|
|
|
[{K, leveled_codec:magic_hash(K), V}|Acc];
|
2016-12-24 17:59:07 +00:00
|
|
|
_ ->
|
|
|
|
Acc
|
|
|
|
end
|
|
|
|
end,
|
|
|
|
KVList3 = lists:foldl(MapFun, [], KVList2),
|
|
|
|
SW2 = os:timestamp(),
|
2016-12-24 18:03:34 +00:00
|
|
|
lists:foreach(fun({K, H, _V}) ->
|
|
|
|
?assertMatch(not_present, sst_get(Pid, K, H))
|
2016-12-24 17:59:07 +00:00
|
|
|
end,
|
|
|
|
KVList3),
|
|
|
|
io:format(user,
|
|
|
|
"Checking for ~w missing keys took ~w microseconds~n",
|
|
|
|
[length(KVList3), timer:now_diff(os:timestamp(), SW2)]),
|
2016-12-28 15:48:04 +00:00
|
|
|
ok = sst_printtimings(Pid),
|
|
|
|
FetchList1 = sst_getkvrange(Pid, all, all, 2),
|
|
|
|
FoldFun = fun(X, Acc) ->
|
|
|
|
case X of
|
|
|
|
{pointer, P, S, SK, EK} ->
|
2017-01-02 10:47:04 +00:00
|
|
|
Acc ++ sst_getslots(P, [{pointer, P, S, SK, EK}]);
|
2016-12-28 15:48:04 +00:00
|
|
|
_ ->
|
|
|
|
Acc ++ [X]
|
|
|
|
end end,
|
|
|
|
FetchedList1 = lists:foldl(FoldFun, [], FetchList1),
|
|
|
|
?assertMatch(KVList1, FetchedList1),
|
|
|
|
|
|
|
|
{TenthKey, _v10} = lists:nth(10, KVList1),
|
|
|
|
{Three000Key, _v300} = lists:nth(300, KVList1),
|
|
|
|
SubKVList1 = lists:sublist(KVList1, 10, 291),
|
|
|
|
SubKVList1L = length(SubKVList1),
|
|
|
|
FetchList2 = sst_getkvrange(Pid, TenthKey, Three000Key, 2),
|
2016-12-28 21:47:05 +00:00
|
|
|
?assertMatch(pointer, element(1, lists:last(FetchList2))),
|
2016-12-28 15:48:04 +00:00
|
|
|
FetchedList2 = lists:foldl(FoldFun, [], FetchList2),
|
|
|
|
?assertMatch(SubKVList1L, length(FetchedList2)),
|
|
|
|
?assertMatch(SubKVList1, FetchedList2),
|
|
|
|
|
2016-12-28 21:47:05 +00:00
|
|
|
{Eight000Key, _v800} = lists:nth(800, KVList1),
|
|
|
|
SubKVListA1 = lists:sublist(KVList1, 10, 791),
|
|
|
|
SubKVListA1L = length(SubKVListA1),
|
|
|
|
FetchListA2 = sst_getkvrange(Pid, TenthKey, Eight000Key, 2),
|
|
|
|
?assertMatch(pointer, element(1, lists:last(FetchListA2))),
|
|
|
|
FetchedListA2 = lists:foldl(FoldFun, [], FetchListA2),
|
|
|
|
?assertMatch(SubKVListA1L, length(FetchedListA2)),
|
|
|
|
?assertMatch(SubKVListA1, FetchedListA2),
|
|
|
|
|
|
|
|
FetchListB2 = sst_getkvrange(Pid, TenthKey, Eight000Key, 4),
|
|
|
|
?assertMatch(pointer, element(1, lists:last(FetchListB2))),
|
|
|
|
FetchedListB2 = lists:foldl(FoldFun, [], FetchListB2),
|
|
|
|
?assertMatch(SubKVListA1L, length(FetchedListB2)),
|
|
|
|
?assertMatch(SubKVListA1, FetchedListB2),
|
|
|
|
|
2016-12-29 02:07:14 +00:00
|
|
|
FetchListB3 = sst_getkvrange(Pid,
|
|
|
|
Eight000Key,
|
|
|
|
{o, null, null, null},
|
|
|
|
4),
|
|
|
|
FetchedListB3 = lists:foldl(FoldFun, [], FetchListB3),
|
|
|
|
SubKVListA3 = lists:nthtail(800 - 1, KVList1),
|
|
|
|
SubKVListA3L = length(SubKVListA3),
|
|
|
|
io:format("Length expected ~w~n", [SubKVListA3L]),
|
|
|
|
?assertMatch(SubKVListA3L, length(FetchedListB3)),
|
|
|
|
?assertMatch(SubKVListA3, FetchedListB3),
|
|
|
|
|
2016-12-29 04:37:49 +00:00
|
|
|
io:format("Eight hundredth key ~w~n", [Eight000Key]),
|
|
|
|
FetchListB4 = sst_getkvrange(Pid,
|
|
|
|
Eight000Key,
|
|
|
|
Eight000Key,
|
|
|
|
4),
|
|
|
|
FetchedListB4 = lists:foldl(FoldFun, [], FetchListB4),
|
|
|
|
?assertMatch([{Eight000Key, _v800}], FetchedListB4),
|
|
|
|
|
2016-12-24 17:48:31 +00:00
|
|
|
ok = sst_close(Pid),
|
2017-03-09 21:23:09 +00:00
|
|
|
ok = file:delete(filename:join(RP, Filename ++ ".sst")).
|
2016-12-24 17:48:31 +00:00
|
|
|
|
2016-12-29 02:07:14 +00:00
|
|
|
key_dominates_test() ->
|
|
|
|
KV1 = {{o, "Bucket", "Key1", null}, {5, {active, infinity}, 0, []}},
|
|
|
|
KV2 = {{o, "Bucket", "Key3", null}, {6, {active, infinity}, 0, []}},
|
|
|
|
KV3 = {{o, "Bucket", "Key2", null}, {3, {active, infinity}, 0, []}},
|
|
|
|
KV4 = {{o, "Bucket", "Key4", null}, {7, {active, infinity}, 0, []}},
|
|
|
|
KV5 = {{o, "Bucket", "Key1", null}, {4, {active, infinity}, 0, []}},
|
|
|
|
KV6 = {{o, "Bucket", "Key1", null}, {99, {tomb, 999}, 0, []}},
|
|
|
|
KV7 = {{o, "Bucket", "Key1", null}, {99, tomb, 0, []}},
|
|
|
|
KL1 = [KV1, KV2],
|
|
|
|
KL2 = [KV3, KV4],
|
|
|
|
?assertMatch({{next_key, KV1}, [KV2], KL2},
|
|
|
|
key_dominates(KL1, KL2, {undefined, 1})),
|
|
|
|
?assertMatch({{next_key, KV1}, KL2, [KV2]},
|
|
|
|
key_dominates(KL2, KL1, {undefined, 1})),
|
|
|
|
?assertMatch({skipped_key, KL2, KL1},
|
|
|
|
key_dominates([KV5|KL2], KL1, {undefined, 1})),
|
|
|
|
?assertMatch({{next_key, KV1}, [KV2], []},
|
|
|
|
key_dominates(KL1, [], {undefined, 1})),
|
|
|
|
?assertMatch({skipped_key, [KV6|KL2], [KV2]},
|
|
|
|
key_dominates([KV6|KL2], KL1, {undefined, 1})),
|
|
|
|
?assertMatch({{next_key, KV6}, KL2, [KV2]},
|
|
|
|
key_dominates([KV6|KL2], [KV2], {undefined, 1})),
|
|
|
|
?assertMatch({skipped_key, [KV6|KL2], [KV2]},
|
|
|
|
key_dominates([KV6|KL2], KL1, {true, 1})),
|
|
|
|
?assertMatch({skipped_key, [KV6|KL2], [KV2]},
|
|
|
|
key_dominates([KV6|KL2], KL1, {true, 1000})),
|
|
|
|
?assertMatch({{next_key, KV6}, KL2, [KV2]},
|
|
|
|
key_dominates([KV6|KL2], [KV2], {true, 1})),
|
|
|
|
?assertMatch({skipped_key, KL2, [KV2]},
|
|
|
|
key_dominates([KV6|KL2], [KV2], {true, 1000})),
|
|
|
|
?assertMatch({skipped_key, [], []},
|
|
|
|
key_dominates([KV6], [], {true, 1000})),
|
|
|
|
?assertMatch({skipped_key, [], []},
|
|
|
|
key_dominates([], [KV6], {true, 1000})),
|
|
|
|
?assertMatch({{next_key, KV6}, [], []},
|
|
|
|
key_dominates([KV6], [], {true, 1})),
|
|
|
|
?assertMatch({{next_key, KV6}, [], []},
|
|
|
|
key_dominates([], [KV6], {true, 1})),
|
|
|
|
?assertMatch({skipped_key, [], []},
|
|
|
|
key_dominates([KV7], [], {true, 1})),
|
|
|
|
?assertMatch({skipped_key, [], []},
|
|
|
|
key_dominates([], [KV7], {true, 1})),
|
|
|
|
?assertMatch({skipped_key, [KV7|KL2], [KV2]},
|
|
|
|
key_dominates([KV7|KL2], KL1, {undefined, 1})),
|
|
|
|
?assertMatch({{next_key, KV7}, KL2, [KV2]},
|
|
|
|
key_dominates([KV7|KL2], [KV2], {undefined, 1})),
|
|
|
|
?assertMatch({skipped_key, [KV7|KL2], [KV2]},
|
|
|
|
key_dominates([KV7|KL2], KL1, {true, 1})),
|
|
|
|
?assertMatch({skipped_key, KL2, [KV2]},
|
|
|
|
key_dominates([KV7|KL2], [KV2], {true, 1})).
|
|
|
|
|
2017-01-04 21:36:59 +00:00
|
|
|
nonsense_coverage_test() ->
|
|
|
|
{ok, Pid} = gen_fsm:start(?MODULE, [], []),
|
|
|
|
ok = gen_fsm:send_all_state_event(Pid, nonsense),
|
|
|
|
?assertMatch({next_state, reader, #state{}}, handle_info(nonsense,
|
|
|
|
reader,
|
|
|
|
#state{})),
|
|
|
|
?assertMatch({ok, reader, #state{}}, code_change(nonsense,
|
|
|
|
reader,
|
|
|
|
#state{},
|
2017-02-26 21:37:47 +00:00
|
|
|
nonsense)),
|
|
|
|
?assertMatch({reply, undefined, reader, #state{}},
|
|
|
|
handle_sync_event("hello", self(), reader, #state{})).
|
2016-12-29 02:07:14 +00:00
|
|
|
|
2017-07-31 19:30:29 +02:00
|
|
|
-endif.
|