From 75091914669e98450223476d3e22c2278c858411 Mon Sep 17 00:00:00 2001 From: Martin Sumner Date: Tue, 14 Mar 2023 16:27:08 +0000 Subject: [PATCH] Initial support for OTP 26 (#395) * Initial support for OTP 26 * Extend timeout in test --- src/leveled_bookie.erl | 15 ++-- src/leveled_cdb.erl | 178 ++++++++++++++++++++------------------ src/leveled_codec.erl | 52 +++++------ src/leveled_ebloom.erl | 10 +-- src/leveled_head.erl | 3 +- src/leveled_iclerk.erl | 6 +- src/leveled_imanifest.erl | 4 +- src/leveled_inker.erl | 30 +++---- src/leveled_log.erl | 10 +-- src/leveled_monitor.erl | 10 +-- src/leveled_pclerk.erl | 9 +- src/leveled_penciller.erl | 73 ++++++++-------- src/leveled_pmanifest.erl | 4 +- src/leveled_pmem.erl | 6 +- src/leveled_runner.erl | 9 +- src/leveled_sst.erl | 52 +++++------ src/leveled_tictac.erl | 34 ++++---- src/leveled_tree.erl | 20 +++-- src/leveled_util.erl | 6 +- 19 files changed, 273 insertions(+), 258 deletions(-) diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl index ffb5575..a0abc7e 100644 --- a/src/leveled_bookie.erl +++ b/src/leveled_bookie.erl @@ -102,8 +102,6 @@ -export([book_returnactors/1]). -endif. --include_lib("eunit/include/eunit.hrl"). - -define(LOADING_PAUSE, 1000). -define(CACHE_SIZE, 2500). -define(MAX_CACHE_MULTTIPLE, 2). @@ -1153,11 +1151,6 @@ book_addlogs(Pid, ForcedLogs) -> book_removelogs(Pid, ForcedLogs) -> gen_server:cast(Pid, {remove_logs, ForcedLogs}). -%% @doc -%% Return the Inker and Penciller - {ok, Inker, Penciller}. Used only in tests -book_returnactors(Pid) -> - gen_server:call(Pid, return_actors). - %%%============================================================================ %%% gen_server callbacks @@ -2549,6 +2542,14 @@ maybelog_snap_timing({Pid, _StatsFreq}, BookieTime, PCLTime) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +%% @doc +%% Return the Inker and Penciller - {ok, Inker, Penciller}. Used only in tests +book_returnactors(Pid) -> + gen_server:call(Pid, return_actors). + + reset_filestructure() -> RootPath = "test/test_area", leveled_inker:clean_testdir(RootPath ++ "/" ++ ?JOURNAL_FP), diff --git a/src/leveled_cdb.erl b/src/leveled_cdb.erl index 8939a39..c7a8014 100644 --- a/src/leveled_cdb.erl +++ b/src/leveled_cdb.erl @@ -95,8 +95,6 @@ -export([finished_rolling/1, hashtable_calc/2]). --include_lib("eunit/include/eunit.hrl"). - -define(DWORD_SIZE, 8). -define(WORD_SIZE, 4). -define(MAX_FILE_SIZE, 3221225472). @@ -969,8 +967,8 @@ open_active_file(FileName) when is_list(FileName) -> end, {LastPosition, HashTree, LastKey}. --spec put(list()|file:io_device(), - any(), any(), +-spec put(file:io_device(), + any(), any(), {integer(), ets:tid()}, boolean(), integer(), boolean()) -> roll|{file:io_device(), integer(), ets:tid()}. %% @doc @@ -978,17 +976,7 @@ open_active_file(FileName) when is_list(FileName) -> %% Append to an active file a new key/value pair returning an updated %% dictionary of Keys and positions. 
Returns an updated Position %% -put(FileName, - Key, - Value, - {LastPosition, HashTree}, - BinaryMode, - MaxSize, - IsEmpty) when is_list(FileName) -> - {ok, Handle} = file:open(FileName, ?WRITE_OPS), - put(Handle, Key, Value, {LastPosition, HashTree}, - BinaryMode, MaxSize, IsEmpty); -put(Handle, Key, Value, {LastPosition, HashTree}, +put(Handle, Key, Value, {LastPosition, HashTree}, BinaryMode, MaxSize, IsEmpty) -> Bin = key_value_to_record({Key, Value}, BinaryMode), ObjectSize = byte_size(Bin), @@ -1049,14 +1037,12 @@ get_withcache(Handle, Key, Cache, BinaryMode, Monitor) -> get_withcache(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor) -> get(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor). -get(FileNameOrHandle, Key, BinaryMode) -> - get(FileNameOrHandle, Key, no_cache, true, BinaryMode, {no_monitor, 0}). - -spec get( - list()|file:io_device(), - any(), no_cache|tuple(), - loose_presence|any(), + file:io_device(), + any(), + tuple(), + loose_presence|any(), boolean(), leveled_monitor:monitor()) -> tuple()|probably|missing. %% @doc @@ -1067,16 +1053,15 @@ get(FileNameOrHandle, Key, BinaryMode) -> %% that Key) %% %% Timings also passed in and can be updated based on results -get(FileName, Key, Cache, QuickCheck, BinaryMode, Monitor) - when is_list(FileName) -> - {ok, Handle} = file:open(FileName,[binary, raw, read]), - get(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor); -get(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor) +get(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor) when is_tuple(Handle) -> + get(Handle, Key, Cache, fun get_index/3, QuickCheck, BinaryMode, Monitor). + +get(Handle, Key, Cache, CacheFun, QuickCheck, BinaryMode, Monitor) -> SW0 = leveled_monitor:maybe_time(Monitor), Hash = hash(Key), Index = hash_to_index(Hash), - {HashTable, Count} = get_index(Handle, Index, Cache), + {HashTable, Count} = CacheFun(Handle, Index, Cache), {TS0, SW1} = leveled_monitor:step_time(SW0), % If the count is 0 for that index - key must be missing case Count of @@ -1100,10 +1085,6 @@ get(Handle, Key, Cache, QuickCheck, BinaryMode, Monitor) Result end. -get_index(Handle, Index, no_cache) -> - {ok,_} = file:position(Handle, {bof, ?DWORD_SIZE * Index}), - % Get location of hashtable and number of entries in the hash - read_next_2_integers(Handle); get_index(_Handle, Index, Cache) -> element(Index + 1, Cache). @@ -1114,9 +1095,6 @@ get_index(_Handle, Index, Cache) -> get_mem(Key, FNOrHandle, HashTree, BinaryMode) -> get_mem(Key, FNOrHandle, HashTree, BinaryMode, true). -get_mem(Key, Filename, HashTree, BinaryMode, QuickCheck) when is_list(Filename) -> - {ok, Handle} = file:open(Filename, [binary, raw, read]), - get_mem(Key, Handle, HashTree, BinaryMode, QuickCheck); get_mem(Key, Handle, HashTree, BinaryMode, QuickCheck) -> ListToCheck = get_hashtree(Key, HashTree), case {QuickCheck, ListToCheck} of @@ -1609,26 +1587,6 @@ maybelog_get_timing({Pid, _StatsFreq}, IndexTime, ReadTime, CycleCount) -> Pid, {cdb_get_update, CycleCount, IndexTime, ReadTime}). -% Write Key and Value tuples into the CDB. Each tuple consists of a -% 4 byte key length, a 4 byte value length, the actual key followed -% by the value. -% -% Returns a dictionary that is keyed by -% the least significant 8 bits of each hash with the -% values being a list of the hash and the position of the -% key/value binary in the file. 
-write_key_value_pairs(Handle, KeyValueList) -> - {ok, Position} = file:position(Handle, cur), - HashTree = new_hashtree(), - write_key_value_pairs(Handle, KeyValueList, {Position, HashTree}). - -write_key_value_pairs(_, [], Acc) -> - Acc; -write_key_value_pairs(Handle, [HeadPair|TailList], Acc) -> - {Key, Value} = HeadPair, - {Handle, NewPosition, HashTree} = put(Handle, Key, Value, Acc), - write_key_value_pairs(Handle, TailList, {NewPosition, HashTree}). - %% Write the actual hashtables at the bottom of the file. Each hash table %% entry is a doubleword in length. The first word is the hash value %% corresponding to a key and the second word is a file pointer to the @@ -1855,6 +1813,60 @@ write_hash_tables([Index|Rest], HashTree, CurrPos, BasePos, %%%%%%%%%%%%%%% -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +% Write Key and Value tuples into the CDB. Each tuple consists of a +% 4 byte key length, a 4 byte value length, the actual key followed +% by the value. +% +% Returns a dictionary that is keyed by +% the least significant 8 bits of each hash with the +% values being a list of the hash and the position of the +% key/value binary in the file. +write_key_value_pairs(Handle, KeyValueList) -> + {ok, Position} = file:position(Handle, cur), + HashTree = new_hashtree(), + write_key_value_pairs(Handle, KeyValueList, {Position, HashTree}). + +write_key_value_pairs(_, [], Acc) -> + Acc; +write_key_value_pairs(Handle, [HeadPair|TailList], Acc) -> + {Key, Value} = HeadPair, + {Handle, NewPosition, HashTree} = put(Handle, Key, Value, Acc), + write_key_value_pairs(Handle, TailList, {NewPosition, HashTree}). + +get(FileName, Key, BinaryMode) when is_list(FileName) -> + {ok, Handle} = file:open(FileName,[binary, raw, read]), + get(Handle, Key, BinaryMode); +get(Handle, Key, BinaryMode) -> + get( + Handle, Key, no_cache, fun get_uncached_index/3, + true, BinaryMode, {no_monitor, 0}). + +get_uncached_index(Handle, Index, no_cache) -> + {ok,_} = file:position(Handle, {bof, ?DWORD_SIZE * Index}), + % Get location of hashtable and number of entries in the hash + read_next_2_integers(Handle). + +file_put(FileName, + Key, + Value, + {LastPosition, HashTree}, + BinaryMode, + MaxSize, + IsEmpty) when is_list(FileName) -> +{ok, Handle} = file:open(FileName, ?WRITE_OPS), +put(Handle, Key, Value, {LastPosition, HashTree}, + BinaryMode, MaxSize, IsEmpty). + +file_get_mem(Key, Filename, HashTree, BinaryMode) -> + file_get_mem(Key, Filename, HashTree, BinaryMode, true). + +file_get_mem(Key, Filename, HashTree, BinaryMode, QuickCheck) + when is_list(Filename) -> + {ok, Handle} = file:open(Filename, [binary, raw, read]), + get_mem(Key, Handle, HashTree, BinaryMode, QuickCheck). + %% To make this compatible with original Bernstein format this endian flip %% and also the use of the standard hash function required. endian_flip(Int) -> @@ -1883,10 +1895,12 @@ create(FileName,KeyValueList) -> %% Should not be used for non-test PUTs by the inker - as the Max File Size %% should be taken from the startup options not the default -put(FileName, Key, Value, {LastPosition, HashTree}) -> - put(FileName, Key, Value, {LastPosition, HashTree}, - ?BINARY_MODE, ?MAX_FILE_SIZE, false). - +put(FileName, Key, Value, {LastPosition, HashTree}) when is_list(FileName) -> + file_put(FileName, Key, Value, {LastPosition, HashTree}, + ?BINARY_MODE, ?MAX_FILE_SIZE, false); +put(Handle, Key, Value, {LastPosition, HashTree}) -> + put(Handle, Key, Value, {LastPosition, HashTree}, + ?BINARY_MODE, ?MAX_FILE_SIZE, false). 
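The refactored get/7 above threads the hash-index lookup in as a fun: the production get/6 passes fun get_index/3 (reading from the cached tuple), while the test-only get/3 passes fun get_uncached_index/3 (seeking in the file). A minimal sketch of that parameterisation, using invented module and function names rather than anything from leveled_cdb:

-module(cachefun_example).
-export([lookup/3]).

%% Single body parameterised by an index-lookup fun, so the cached production
%% path and the uncached test path share all other logic.
lookup(Positions, Index, Cache) ->
    lookup(Positions, Index, Cache, fun cached_slot/3).

lookup(Positions, Index, Cache, SlotFun) ->
    Slot = SlotFun(Positions, Index, Cache),
    lists:nth(Slot, Positions).

cached_slot(_Positions, Index, Cache) ->
    element(Index, Cache).

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

%% Test-only fallback for when no cache tuple has been built.
uncached_slot(Positions, Index, no_cache) when Index =< length(Positions) ->
    Index.

lookup_test() ->
    Positions = [alpha, beta, gamma],
    ?assertMatch(beta, lookup(Positions, 2, {1, 2, 3})),
    ?assertMatch(beta, lookup(Positions, 2, no_cache, fun uncached_slot/3)).
-endif.

Keeping the uncached variant behind the TEST flag means the production lookup path never carries the extra clause or the file seek.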
dump(FileName) -> {ok, Handle} = file:open(FileName, [binary, raw, read]), @@ -2172,27 +2186,25 @@ activewrite_singlewrite_test() -> open_active_file("test/test_area/test_mem.cdb"), io:format("File opened as new active file " "with LastPosition=~w ~n", [LastPosition]), - {_, _, UpdKeyDict} = put("test/test_area/test_mem.cdb", - Key, Value, - {LastPosition, KeyDict}), + {_, _, UpdKeyDict} = + put( + "test/test_area/test_mem.cdb", + Key, Value, {LastPosition, KeyDict}), io:format("New key and value added to active file ~n", []), - ?assertMatch({Key, Value}, - get_mem(Key, - "test/test_area/test_mem.cdb", - UpdKeyDict, - false)), - ?assertMatch(probably, - get_mem(Key, - "test/test_area/test_mem.cdb", - UpdKeyDict, - false, - loose_presence)), - ?assertMatch(missing, - get_mem("not_present", - "test/test_area/test_mem.cdb", - UpdKeyDict, - false, - loose_presence)), + ?assertMatch( + {Key, Value}, + file_get_mem( + Key, "test/test_area/test_mem.cdb", UpdKeyDict, false)), + ?assertMatch( + probably, + file_get_mem( + Key, "test/test_area/test_mem.cdb", + UpdKeyDict, false, loose_presence)), + ?assertMatch( + missing, + file_get_mem( + "not_present", "test/test_area/test_mem.cdb", + UpdKeyDict, false, loose_presence)), ok = file:delete("test/test_area/test_mem.cdb"). search_hash_table_findinslot_test() -> @@ -2220,11 +2232,13 @@ search_hash_table_findinslot_test() -> ?assertMatch({"key1", "value1"}, get(Handle, Key1, false)), NoMonitor = {no_monitor, 0}, ?assertMatch( - probably, - get(Handle, Key1, no_cache, loose_presence, false, NoMonitor)), + probably, + get(Handle, Key1, no_cache, fun get_uncached_index/3, + loose_presence, false, NoMonitor)), ?assertMatch( - missing, - get(Handle, "Key99", no_cache, loose_presence, false, NoMonitor)), + missing, + get(Handle, "Key99", no_cache, fun get_uncached_index/3, + loose_presence, false, NoMonitor)), {ok, _} = file:position(Handle, FirstHashPosition), FlipH3 = endian_flip(ReadH3), FlipP3 = endian_flip(ReadP3), diff --git a/src/leveled_codec.erl b/src/leveled_codec.erl index ca1a3dd..0e347dd 100644 --- a/src/leveled_codec.erl +++ b/src/leveled_codec.erl @@ -10,8 +10,6 @@ -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - -export([ inker_reload_strategy/1, strip_to_seqonly/1, @@ -137,29 +135,31 @@ :: list(integer())|false. -export_type([tag/0, - key/0, - sqn/0, - object_spec/0, - segment_hash/0, - ledger_status/0, - ledger_key/0, - ledger_value/0, - ledger_kv/0, - compaction_strategy/0, - compaction_method/0, - journal_key_tag/0, - journal_key/0, - journal_ref/0, - compression_method/0, - journal_keychanges/0, - index_specs/0, - segment_list/0, - maybe_lookup/0, - last_moddate/0, - lastmod_range/0, - regular_expression/0, - value_fetcher/0, - proxy_object/0]). + key/0, + sqn/0, + object_spec/0, + segment_hash/0, + ledger_status/0, + ledger_key/0, + ledger_value/0, + ledger_kv/0, + compaction_strategy/0, + compaction_method/0, + journal_key_tag/0, + journal_key/0, + journal_ref/0, + compression_method/0, + journal_keychanges/0, + index_specs/0, + segment_list/0, + maybe_lookup/0, + last_moddate/0, + lastmod_range/0, + regular_expression/0, + value_fetcher/0, + proxy_object/0, + slimmed_key/0 + ]). %%%============================================================================ @@ -765,6 +765,8 @@ next_key({Type, Bucket}) when is_binary(Type), is_binary(Bucket) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). 
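As the hunk above shows for leveled_codec, the eunit include now lives inside the -ifdef(TEST) block rather than at the top of the module, a change the patch repeats in every file it touches, so the eunit header is only pulled in when compiling with TEST defined. A skeletal example of the resulting module layout (the module and test here are invented):

-module(layout_example).
-export([double/1]).

double(X) ->
    2 * X.

%%%============================================================================
%%% Test
%%%============================================================================

-ifdef(TEST).

-include_lib("eunit/include/eunit.hrl").

double_test() ->
    ?assertMatch(4, double(2)).

-endif.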
+ valid_ledgerkey_test() -> UserDefTag = {user_defined, <<"B">>, <<"K">>, null}, ?assertMatch(true, isvalid_ledgerkey(UserDefTag)), diff --git a/src/leveled_ebloom.erl b/src/leveled_ebloom.erl index 6484891..f5050ed 100644 --- a/src/leveled_ebloom.erl +++ b/src/leveled_ebloom.erl @@ -9,12 +9,10 @@ -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - -export([ - create_bloom/1, - check_hash/2 - ]). + create_bloom/1, + check_hash/2 + ]). -define(BLOOM_SIZE_BYTES, 512). -define(INTEGER_SIZE, 4096). @@ -506,6 +504,8 @@ add_hashlist([{_SegHash, TopHash}|T], -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + generate_orderedkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) -> generate_orderedkeys(Seqn, Count, diff --git a/src/leveled_head.erl b/src/leveled_head.erl index 7224e7f..248c095 100644 --- a/src/leveled_head.erl +++ b/src/leveled_head.erl @@ -18,8 +18,6 @@ -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - -export([key_to_canonicalbinary/1, build_head/2, extract_metadata/3, @@ -455,6 +453,7 @@ assemble_index_specs(Indexes, IndexOp) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). index_extract_test() -> SibMetaBin = diff --git a/src/leveled_iclerk.erl b/src/leveled_iclerk.erl index 7ae3b44..6dd33dc 100644 --- a/src/leveled_iclerk.erl +++ b/src/leveled_iclerk.erl @@ -93,8 +93,6 @@ -export([schedule_compaction/3]). --include_lib("eunit/include/eunit.hrl"). - -define(JOURNAL_FILEX, "cdb"). -define(PENDING_FILEX, "pnd"). -define(SAMPLE_SIZE, 192). @@ -863,7 +861,7 @@ filter_output_fun(FilterFun, FilterServer, MaxSQN, Strategy) -> -spec to_retain(leveled_codec:journal_key(), leveled_inker:filterfun(), - leveled_inker:fillter_server(), + leveled_inker:filterserver(), leveled_codec:sqn(), leveled_codec:compaction_strategy()) -> boolean()|convert. to_retain(JournalKey, FilterFun, FilterServer, MaxSQN, ReloadStrategy) -> @@ -959,6 +957,8 @@ clear_waste(State) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + schedule_test() -> schedule_test_bycount(1), schedule_test_bycount(2), diff --git a/src/leveled_imanifest.erl b/src/leveled_imanifest.erl index 115e5f9..8532af2 100644 --- a/src/leveled_imanifest.erl +++ b/src/leveled_imanifest.erl @@ -6,8 +6,6 @@ -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - -export([ generate_entry/1, add_entry/3, @@ -251,6 +249,8 @@ find_subentry(SQN, [_TopEntry|Tail]) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + build_testmanifest_aslist() -> ManifestMapFun = fun(N) -> diff --git a/src/leveled_inker.erl b/src/leveled_inker.erl index 013bb5f..be19f5d 100644 --- a/src/leveled_inker.erl +++ b/src/leveled_inker.erl @@ -120,12 +120,11 @@ ink_removelogs/2, ink_getjournalsqn/1]). --export([build_dummy_journal/0, - clean_testdir/1, - filepath/2, - filepath/3]). +-export([filepath/2, filepath/3]). --include_lib("eunit/include/eunit.hrl"). +-ifdef(TEST). +-export([build_dummy_journal/0, clean_testdir/1]). +-endif. -define(MANIFEST_FP, "journal_manifest"). -define(FILES_FP, "journal_files"). @@ -155,7 +154,7 @@ -type inker_options() :: #inker_options{}. -type ink_state() :: #state{}. --type registered_snapshot() :: {pid(), os:timestamp(), integer()}. +-type registered_snapshot() :: {pid(), erlang:timestamp(), integer()}. -type filterserver() :: pid()|list(tuple()). -type filterfun() :: fun((filterserver(), leveled_codec:ledger_key(), leveled_codec:sqn()) -> @@ -385,17 +384,6 @@ ink_compactjournal(Pid, Bookie, _Timeout) -> CheckerFilterFun}, infinity). 
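In the leveled_inker hunk above, build_dummy_journal/0 and clean_testdir/1 are now exported only under -ifdef(TEST), leaving just filepath/2,3 in the unconditional export list; leveled_penciller receives the same treatment for clean_testdir/1 later in the patch. A sketch of that conditional-export arrangement, with made-up names:

-module(cond_export_example).
-export([open/1]).

-ifdef(TEST).
%% Helpers the test suites need, invisible in production builds.
-export([clean_testdir/1]).
-endif.

open(RootPath) ->
    ok = filelib:ensure_dir(filename:join(RootPath, "dummy")),
    {ok, RootPath}.

-ifdef(TEST).
clean_testdir(RootPath) ->
    lists:foreach(
        fun(FN) -> ok = file:delete(FN) end,
        filelib:wildcard(filename:join(RootPath, "*.tmp"))).
-endif.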
-%% Allows the Checker to be overriden in test, use something other than a -%% penciller -ink_compactjournal(Pid, Checker, InitiateFun, CloseFun, FilterFun, _Timeout) -> - gen_server:call(Pid, - {compact, - Checker, - InitiateFun, - CloseFun, - FilterFun}, - infinity). - -spec ink_clerkcomplete(pid(), list(), list()) -> ok. %% @doc %% Used by a clerk to state that a compaction process is over, only change @@ -1277,6 +1265,14 @@ wrap_checkfilterfun(CheckFilterFun) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +%% Allows the Checker to be overriden in test, use something other than a +%% penciller +ink_compactjournal(Pid, Checker, InitiateFun, CloseFun, FilterFun, _Timeout) -> + gen_server:call( + Pid, {compact, Checker, InitiateFun, CloseFun, FilterFun}, infinity). + create_value_for_journal(Obj, Comp) -> leveled_codec:create_value_for_journal(Obj, Comp, native). diff --git a/src/leveled_log.erl b/src/leveled_log.erl index 481976c..17d4018 100644 --- a/src/leveled_log.erl +++ b/src/leveled_log.erl @@ -5,8 +5,6 @@ -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - -export([log/2, log_timer/3, log_randomtimer/4]). @@ -401,9 +399,6 @@ log(LogRef, Subs, SupportedLogLevels) -> ok end. -should_i_log(LogLevel, Levels, LogRef) -> - should_i_log(LogLevel, Levels, LogRef, get_opts()). - should_i_log(LogLevel, Levels, LogRef, LogOpts) -> #log_options{log_level = CurLevel, forced_logs = ForcedLogs} = LogOpts, case lists:member(LogRef, ForcedLogs) of @@ -494,6 +489,11 @@ duration_text(StartTime) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +should_i_log(LogLevel, Levels, LogRef) -> + should_i_log(LogLevel, Levels, LogRef, get_opts()). + format_time({{Y, M, D}, {H, Mi, S, Ms}}) -> io_lib:format("~b-~2..0b-~2..0b", [Y, M, D]) ++ "T" ++ io_lib:format("~2..0b:~2..0b:~2..0b.~3..0b", [H, Mi, S, Ms]). diff --git a/src/leveled_monitor.erl b/src/leveled_monitor.erl index fe7438e..78b10db 100644 --- a/src/leveled_monitor.erl +++ b/src/leveled_monitor.erl @@ -39,8 +39,6 @@ log_remove/2, get_defaults/0]). --include_lib("eunit/include/eunit.hrl"). - -define(LOG_LIST, [bookie_get, bookie_put, bookie_head, bookie_snap, pcl_fetch, sst_fetch, cdb_get]). @@ -204,7 +202,7 @@ log_add(Pid, ForcedLogs) -> log_remove(Pid, ForcedLogs) -> gen_server:cast(Pid, {log_remove, ForcedLogs}). --spec maybe_time(monitor()) -> os:timestamp()|no_timing. +-spec maybe_time(monitor()) -> erlang:timestamp()|no_timing. maybe_time({_Pid, TimingProbability}) -> case leveled_rand:uniform(100) of N when N =< TimingProbability -> @@ -214,8 +212,8 @@ maybe_time({_Pid, TimingProbability}) -> end. -spec step_time( - os:timestamp()|no_timing) -> - {pos_integer(), os:timestamp()}|{no_timing, no_timing}. + erlang:timestamp()|no_timing) -> + {pos_integer(), erlang:timestamp()}|{no_timing, no_timing}. step_time(no_timing) -> {no_timing, no_timing}; step_time(TS) -> @@ -605,6 +603,8 @@ code_change(_OldVsn, State, _Extra) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + coverage_cheat_test() -> {ok, M} = monitor_start(1, []), timer:sleep(2000), diff --git a/src/leveled_pclerk.erl b/src/leveled_pclerk.erl index 1735962..4e5fc38 100644 --- a/src/leveled_pclerk.erl +++ b/src/leveled_pclerk.erl @@ -45,8 +45,6 @@ clerk_removelogs/2 ]). --include_lib("eunit/include/eunit.hrl"). - -define(MAX_TIMEOUT, 2000). -define(MIN_TIMEOUT, 200). -define(GROOMING_PERC, 50). 
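The leveled_monitor (and earlier leveled_inker) hunks above replace os:timestamp() with erlang:timestamp() in specs: os:timestamp/0 is a function, and the tuple type it returns is exported from the erlang module, so that is the name a spec should use. An illustrative example of the corrected spec style (hypothetical module):

-module(timing_example).
-export([start_timer/0, elapsed_microseconds/1]).

-spec start_timer() -> erlang:timestamp().
start_timer() ->
    os:timestamp().

-spec elapsed_microseconds(erlang:timestamp()) -> integer().
elapsed_microseconds(StartTS) ->
    timer:now_diff(os:timestamp(), StartTS).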
@@ -178,8 +176,7 @@ code_change(_OldVsn, State, _Extra) -> -spec handle_work( {leveled_pmanifest:lsm_level(), leveled_pmanifest:manifest()}, string(), sst_options(), pid()) -> - {leveled_pmanifest:pos_integer(), - list(leveled_pmanifest:manifest_entry())}. + {pos_integer(), list(leveled_pmanifest:manifest_entry())}. handle_work( {SrcLevel, Manifest}, RootPath, SSTOpts, Owner) -> {UpdManifest, EntriesToDelete} = @@ -194,7 +191,7 @@ handle_work( {leveled_pmanifest:get_manifest_sqn(UpdManifest), EntriesToDelete}. -spec merge( - leveled_pmanifes:lsm_level(), leveled_pmanifest:manifest(), + leveled_pmanifest:lsm_level(), leveled_pmanifest:manifest(), string(), sst_options()) -> {leveled_pmanifest:manifest(), list(leveled_pmanifest:manifest_entry())}. @@ -359,6 +356,8 @@ return_deletions(ManifestSQN, PendingDeletionD) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + generate_randomkeys(Count, BucketRangeLow, BucketRangeHigh) -> generate_randomkeys(Count, [], BucketRangeLow, BucketRangeHigh). diff --git a/src/leveled_penciller.erl b/src/leveled_penciller.erl index 9991cca..3562e8c 100644 --- a/src/leveled_penciller.erl +++ b/src/leveled_penciller.erl @@ -201,10 +201,10 @@ sst_rootpath/1, sst_filename/3]). +-ifdef(TEST). -export([ clean_testdir/1]). - --include_lib("eunit/include/eunit.hrl"). +-endif. -define(MAX_WORK_WAIT, 300). -define(MANIFEST_FP, "ledger_manifest"). @@ -361,24 +361,6 @@ pcl_fetchlevelzero(Pid, Slot, ReturnFun) -> % be stuck in L0 pending gen_server:cast(Pid, {fetch_levelzero, Slot, ReturnFun}). --spec pcl_fetch(pid(), leveled_codec:ledger_key()) - -> leveled_codec:ledger_kv()|not_present. -%% @doc -%% Fetch a key, return the first (highest SQN) occurrence of that Key along -%% with the value. -%% -%% The Key needs to be hashable (i.e. have a tag which indicates that the key -%% can be looked up) - index entries are not hashable for example. -%% -%% If the hash is already known, call pcl_fetch/3 as segment_hash is a -%% relatively expensive hash function -pcl_fetch(Pid, Key) -> - Hash = leveled_codec:segment_hash(Key), - if - Hash /= no_lookup -> - gen_server:call(Pid, {fetch, Key, Hash, true}, infinity) - end. - -spec pcl_fetch(pid(), leveled_codec:ledger_key(), leveled_codec:segment_hash(), @@ -1457,7 +1439,7 @@ timed_fetch_mem(Key, Hash, Manifest, L0Cache, L0Index, Monitor) -> leveled_pmanifest:manifest(), list(), leveled_pmem:index_array()) -> - not_present|leveled_codec:ledger_kv()|leveled_codec:ledger_sqn(). + not_present|leveled_codec:ledger_kv()|leveled_codec:sqn(). %% @doc %% Fetch the result from the penciller, starting by looking in the memory, %% and if it is not found looking down level by level through the LSM tree. @@ -1560,8 +1542,12 @@ compare_to_sqn(Obj, SQN) -> %%%============================================================================ --spec keyfolder(list(), list(), tuple(), tuple(), - {pclacc_fun(), any(), pos_integer()}) -> any(). +-spec keyfolder( + {list(), list()}, + {leveled_codec:ledger_key(), leveled_codec:ledger_key()}, + {pclacc_fun(), any(), pos_integer()}, + {boolean(), {non_neg_integer(), pos_integer()|infinity}, integer()}) + -> any(). 
%% @doc %% The keyfolder will compare an iterator across the immutable in-memory cache %% of the Penciller (the IMMiter), with an iterator across the persisted part @@ -1579,12 +1565,6 @@ compare_to_sqn(Obj, SQN) -> %% To advance the SSTiter the find_nextkey/4 function is used, as the SSTiter %% is an iterator across multiple levels - and so needs to do its own %% comparisons to pop the next result. -keyfolder(IMMiter, SSTiter, StartKey, EndKey, {AccFun, Acc, Now}) -> - keyfolder({IMMiter, SSTiter}, - {StartKey, EndKey}, - {AccFun, Acc, Now}, - {false, {0, infinity}, -1}). - keyfolder(_Iterators, _KeyRange, {_AccFun, Acc, _Now}, @@ -1721,18 +1701,18 @@ maybe_accumulate(LK, LV, end. --spec find_nextkey(iterator(), - leveled_codec:ledger_key(), leveled_codec:ledger_key()) -> - no_more_keys|{iterator(), leveled_codec:ledger_kv()}. +-spec find_nextkey( + iterator(), + leveled_codec:ledger_key(), + leveled_codec:ledger_key(), + list(non_neg_integer())|false, + non_neg_integer()) + -> no_more_keys|{iterator(), leveled_codec:ledger_kv()}. %% @doc %% Looks to find the best choice for the next key across the levels (other %% than in-memory table) %% In finding the best choice, the next key in a given level may be a next %% block or next file pointer which will need to be expanded - -find_nextkey(QueryArray, StartKey, EndKey) -> - find_nextkey(QueryArray, StartKey, EndKey, false, 0). - find_nextkey(QueryArray, StartKey, EndKey, SegmentList, LowLastMod) -> find_nextkey(QueryArray, -1, @@ -1895,6 +1875,27 @@ maybelog_fetch_timing({Pid, _StatsFreq}, Level, FetchTime, _NF) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + +-spec pcl_fetch( + pid(), leveled_codec:ledger_key()) + -> leveled_codec:ledger_kv()|not_present. +pcl_fetch(Pid, Key) -> + Hash = leveled_codec:segment_hash(Key), + if + Hash /= no_lookup -> + gen_server:call(Pid, {fetch, Key, Hash, true}, infinity) + end. + +keyfolder(IMMiter, SSTiter, StartKey, EndKey, {AccFun, Acc, Now}) -> + keyfolder({IMMiter, SSTiter}, + {StartKey, EndKey}, + {AccFun, Acc, Now}, + {false, {0, infinity}, -1}). + +find_nextkey(QueryArray, StartKey, EndKey) -> + find_nextkey(QueryArray, StartKey, EndKey, false, 0). + generate_randomkeys({Count, StartSQN}) -> generate_randomkeys(Count, StartSQN, []). diff --git a/src/leveled_pmanifest.erl b/src/leveled_pmanifest.erl index 3f419ac..4ec6c2d 100644 --- a/src/leveled_pmanifest.erl +++ b/src/leveled_pmanifest.erl @@ -52,8 +52,6 @@ filepath/2 ]). --include_lib("eunit/include/eunit.hrl"). - -define(MANIFEST_FILEX, "man"). -define(PENDING_FILEX, "pnd"). -define(MANIFEST_FP, "ledger_manifest"). @@ -1037,6 +1035,8 @@ seconds_now() -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + initial_setup() -> initial_setup(single_change). diff --git a/src/leveled_pmem.erl b/src/leveled_pmem.erl index 44fde74..9e1b8a3 100644 --- a/src/leveled_pmem.erl +++ b/src/leveled_pmem.erl @@ -39,9 +39,7 @@ new_index/0, check_index/2, cache_full/1 - ]). - --include_lib("eunit/include/eunit.hrl"). + ]). -define(MAX_CACHE_LINES, 31). % Must be less than 128 @@ -238,6 +236,8 @@ check_slotlist(Key, _Hash, CheckList, TreeList) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + generate_randomkeys_aslist(Seqn, Count, BucketRangeLow, BucketRangeHigh) -> lists:ukeysort(1, generate_randomkeys(Seqn, diff --git a/src/leveled_runner.erl b/src/leveled_runner.erl index 1d8dcc7..88564ec 100644 --- a/src/leveled_runner.erl +++ b/src/leveled_runner.erl @@ -37,9 +37,6 @@ foldobjects_byindex/3 ]). 
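Several of the spec corrections above follow the same rule: a remote type used in a spec must exist and be exported from the module named, and built-in types are never module-qualified. Hence leveled_pmanifest:pos_integer() becomes pos_integer(), the leveled_pmanifes typo becomes leveled_pmanifest, leveled_codec:ledger_sqn() becomes leveled_codec:sqn(), and slimmed_key/0 is added to leveled_codec's -export_type list. A small sketch of the pattern (invented module):

-module(spec_hygiene_example).
-export([bump/1]).
-export_type([sqn/0]).

%% A remote type may only appear in another module's spec if it is exported
%% here; built-ins such as pos_integer() are referenced without a module.
-type sqn() :: pos_integer().

-spec bump(sqn()) -> sqn().
bump(SQN) ->
    SQN + 1.

Another module can then write -spec load(spec_hygiene_example:sqn()) -> ok. and dialyzer resolves the reference.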
- --include_lib("eunit/include/eunit.hrl"). - -define(CHECKJOURNAL_PROB, 0.2). -type key_range() @@ -65,6 +62,8 @@ :: fun(() -> foldacc()). -type acc_fun() :: fun((leveled_codec:key(), any(), foldacc()) -> foldacc()). +-type mp() + :: {re_pattern, term(), term(), term(), term()}. %%%============================================================================ @@ -139,7 +138,7 @@ bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc, MaxBuckets) -> -spec index_query(snap_fun(), {leveled_codec:ledger_key(), leveled_codec:ledger_key(), - {boolean(), undefined|re:mp()|iodata()}}, + {boolean(), undefined|mp()|iodata()}}, {fold_keys_fun(), foldacc()}) -> {async, runner_fun()}. %% @doc @@ -805,6 +804,8 @@ wrap_runner(FoldAction, AfterAction) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + %% Note in OTP 22 we see a compile error with the assertException if using the %% eqc_cover parse_transform. This test is excluded in the eqc profle, due to %% this error diff --git a/src/leveled_sst.erl b/src/leveled_sst.erl index cee6bb6..d2e83c9 100644 --- a/src/leveled_sst.erl +++ b/src/leveled_sst.erl @@ -95,8 +95,6 @@ -define(START_OPTS, [{hibernate_after, ?HIBERNATE_TIMEOUT}]). --include_lib("eunit/include/eunit.hrl"). - -export([init/1, callback_mode/0, terminate/3, @@ -125,12 +123,8 @@ sst_gettombcount/1, sst_close/1]). --ifdef(TEST). - -export([sst_newmerge/10]). --endif. - -export([tune_seglist/1, extract_hash/1, member_check/2]). -export([in_range/3]). @@ -1057,20 +1051,6 @@ expand_list_by_pointer({next, ManEntry, StartKey, EndKey}, ExpPointer ++ Tail. --spec sst_getkvrange(pid(), - range_endpoint(), - range_endpoint(), - integer()) - -> list(leveled_codec:ledger_kv()|slot_pointer()). -%% @doc -%% Get a range of {Key, Value} pairs as a list between StartKey and EndKey -%% (inclusive). The ScanWidth is the maximum size of the range, a pointer -%% will be placed on the tail of the resulting list if results expand beyond -%% the Scan Width -sst_getkvrange(Pid, StartKey, EndKey, ScanWidth) -> - sst_getfilteredrange(Pid, StartKey, EndKey, ScanWidth, false, 0). - - -spec sst_getfilteredrange(pid(), range_endpoint(), range_endpoint(), @@ -1108,14 +1088,6 @@ sst_getfilteredrange(Pid, StartKey, EndKey, ScanWidth, SegList, LowLastMod) -> Reply end. --spec sst_getslots(pid(), list(slot_pointer())) - -> list(leveled_codec:ledger_kv()). -%% @doc -%% Get a list of slots by their ID. The slot will be converted from the binary -%% to term form outside of the FSM loop, this is to stop the copying of the -%% converted term to the calling process. -sst_getslots(Pid, SlotList) -> - sst_getfilteredslots(Pid, SlotList, false, 0). -spec sst_getfilteredslots(pid(), list(slot_pointer()), @@ -3101,8 +3073,32 @@ maybelog_fetch_timing({Pid, _SlotFreq}, Level, Type, SW) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + -define(TEST_AREA, "test/test_area/"). +-spec sst_getkvrange(pid(), + range_endpoint(), + range_endpoint(), + integer()) + -> list(leveled_codec:ledger_kv()|slot_pointer()). +%% @doc +%% Get a range of {Key, Value} pairs as a list between StartKey and EndKey +%% (inclusive). The ScanWidth is the maximum size of the range, a pointer +%% will be placed on the tail of the resulting list if results expand beyond +%% the Scan Width +sst_getkvrange(Pid, StartKey, EndKey, ScanWidth) -> + sst_getfilteredrange(Pid, StartKey, EndKey, ScanWidth, false, 0). + +-spec sst_getslots(pid(), list(slot_pointer())) + -> list(leveled_codec:ledger_kv()). +%% @doc +%% Get a list of slots by their ID. 
The slot will be converted from the binary +%% to term form outside of the FSM loop, this is to stop the copying of the +%% converted term to the calling process. +sst_getslots(Pid, SlotList) -> + sst_getfilteredslots(Pid, SlotList, false, 0). + testsst_new(RootPath, Filename, Level, KVList, MaxSQN, PressMethod) -> OptsSST = #sst_options{press_method=PressMethod, diff --git a/src/leveled_tictac.erl b/src/leveled_tictac.erl index 827fd84..551ad85 100644 --- a/src/leveled_tictac.erl +++ b/src/leveled_tictac.erl @@ -80,9 +80,6 @@ tictac_hash/2 % called by kv_index_tictactree ]). - --include_lib("eunit/include/eunit.hrl"). - -define(HASH_SIZE, 4). -define(L2_CHUNKSIZE, 256). -define(L2_BITSIZE, 8). @@ -562,6 +559,22 @@ segmentcompare(SrcBin, SnkBin, Acc, Counter) -> segmentcompare(SrcTail, SnkTail, [Counter|Acc], Counter + 1) end. +merge_binaries(BinA, BinB) -> + BitSize = bit_size(BinA), + BitSize = bit_size(BinB), + <> = BinA, + <> = BinB, + MergedInt = AInt bxor BInt, + <>. + +%%%============================================================================ +%%% Test +%%%============================================================================ + +-ifdef(TEST). + +-include_lib("eunit/include/eunit.hrl"). + checktree(TicTacTree) -> checktree(TicTacTree#tictactree.level1, TicTacTree, 0). @@ -581,21 +594,6 @@ segmentsummarise(L2Bin, L1Acc) -> <> = L2Bin, segmentsummarise(Tail, L1Acc bxor TopHash). -merge_binaries(BinA, BinB) -> - BitSize = bit_size(BinA), - BitSize = bit_size(BinB), - <> = BinA, - <> = BinB, - MergedInt = AInt bxor BInt, - <>. - -%%%============================================================================ -%%% Test -%%%============================================================================ - --ifdef(TEST). - - simple_bysize_test_() -> {timeout, 60, fun simple_bysize_test_allsizes/0}. diff --git a/src/leveled_tree.erl b/src/leveled_tree.erl index e3b8982..9435e9f 100644 --- a/src/leveled_tree.erl +++ b/src/leveled_tree.erl @@ -26,8 +26,6 @@ empty/1 ]). --include_lib("eunit/include/eunit.hrl"). - -define(SKIP_WIDTH, 16). -type tree_type() :: tree|idxt|skpl. @@ -570,6 +568,8 @@ iterator(nil, As) -> -ifdef(TEST). +-include_lib("eunit/include/eunit.hrl"). + generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) -> generate_randomkeys(Seqn, Count, @@ -700,19 +700,29 @@ tolist_test_by_type(Type) -> T_Reverse = to_list(T), ?assertMatch(KL, T_Reverse). -tree_timing_test() -> + +timing_tests_tree_test_() -> + {timeout, 60, fun tree_timing/0}. + +timing_tests_idxt_test_() -> + {timeout, 60, fun idxt_timing/0}. + +timing_tests_skpl_test_() -> + {timeout, 60, fun skpl_timing/0}. + +tree_timing() -> log_tree_test_by_(16, tree, 8000), log_tree_test_by_(16, tree, 4000), log_tree_test_by_(4, tree, 256). -idxt_timing_test() -> +idxt_timing() -> log_tree_test_by_(16, idxt, 8000), log_tree_test_by_(16, idxt, 4000), log_tree_test_by_(4, idxt, 256), log_tree_test_by_(16, idxt, 256), log_tree_test_by_simplekey_(16, idxt, 256). -skpl_timing_test() -> +skpl_timing() -> log_tree_test_by_(auto, skpl, 8000), log_tree_test_by_(auto, skpl, 4000), log_tree_test_by_simplekey_(auto, skpl, 4000), diff --git a/src/leveled_util.erl b/src/leveled_util.erl index d654669..3eb6eed 100644 --- a/src/leveled_util.erl +++ b/src/leveled_util.erl @@ -5,12 +5,8 @@ -module(leveled_util). - -include("include/leveled.hrl"). --include_lib("eunit/include/eunit.hrl"). - - -export([generate_uuid/0, integer_now/0, integer_time/1, @@ -94,6 +90,8 @@ safe_rename(TempFN, RealFN, BinData, ReadCheck) -> -ifdef(TEST). 
+-include_lib("eunit/include/eunit.hrl"). + -define(TEST_AREA, "test/test_area/util/"). magichashperf_test() ->
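One further pattern from the leveled_tree hunks above (the "Extend timeout in test" part of the commit message): the slow timing tests become eunit test generators so they can carry an explicit 60-second timeout rather than eunit's 5-second default. A minimal sketch of that shape, with an invented test:

-module(timeout_generator_example).
-export([slow_path/0]).

slow_path() ->
    timer:sleep(100),
    ok.

-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").

%% A *_test_() generator returns a test representation; wrapping the fun in
%% {timeout, Seconds, Fun} overrides eunit's default timeout for this test.
slow_path_test_() ->
    {timeout, 60, fun() -> ?assertMatch(ok, slow_path()) end}.
-endif.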