diff --git a/priv/leveled.schema b/priv/leveled.schema index f27d88e..de05d31 100644 --- a/priv/leveled.schema +++ b/priv/leveled.schema @@ -81,13 +81,18 @@ ]}. %% @doc Log level -%% Can be debug, info, warn, error or critical +%% Can be debug, info, warning, error or critical %% Set the minimum log level to be used within leveled. Leveled will log many -%% lines to allow for stats to be etracted by those using log indexers such as -%% Splunk +%% lines to allow for stats to be extracted by those using log indexers such as +%% Splunk. +%% Logging will be via the erlang logger, and so the destination will be defined +%% by the configured log handlers of the erlang node which starts the bookie. +%% All logs are from the leveled_log module, and so specific handling can be +%% managed using filters on the first element of the MFA metadata, or setting +%% the log level for that specific module. {mapping, "leveled.log_level", "leveled.log_level", [ {default, info}, - {datatype, atom} + {datatype, {enum, [debug, info, warning, error, critical]}} ]}. %% @doc The approximate size (in bytes) when a Journal file should be rolled. diff --git a/src/leveled.app.src b/src/leveled.app.src index 3b0c091..86bf423 100644 --- a/src/leveled.app.src +++ b/src/leveled.app.src @@ -4,10 +4,10 @@ {vsn, git}, {registered, []}, {applications, [ - lz4, - zstd, kernel, - stdlib + stdlib, + lz4, + zstd ]}, {maintainers, ["Martin Sumner"]}, {licenses, ["Apache"]}, diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl index 429480a..a0f8ba7 100644 --- a/src/leveled_bookie.erl +++ b/src/leveled_bookie.erl @@ -38,7 +38,7 @@ -behaviour(gen_server). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([init/1, handle_call/3, diff --git a/src/leveled_cdb.erl b/src/leveled_cdb.erl index 24ecb5f..6c1f6a6 100644 --- a/src/leveled_cdb.erl +++ b/src/leveled_cdb.erl @@ -42,13 +42,11 @@ %% The first word is the corresponding hash value and the second word is a %% file pointer to the actual {key,value} tuple higher in the file. %% -%% - -module(leveled_cdb). -behaviour(gen_statem). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([init/1, callback_mode/0, diff --git a/src/leveled_codec.erl b/src/leveled_codec.erl index 633b1e7..bb69d81 100644 --- a/src/leveled_codec.erl +++ b/src/leveled_codec.erl @@ -8,7 +8,7 @@ -module(leveled_codec). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([ inker_reload_strategy/1, diff --git a/src/leveled_head.erl b/src/leveled_head.erl index 0773f72..b4bd6a3 100644 --- a/src/leveled_head.erl +++ b/src/leveled_head.erl @@ -16,7 +16,7 @@ -module(leveled_head). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([key_to_canonicalbinary/1, build_head/2, diff --git a/src/leveled_iclerk.erl b/src/leveled_iclerk.erl index 8b2c4b6..e241348 100644 --- a/src/leveled_iclerk.erl +++ b/src/leveled_iclerk.erl @@ -72,7 +72,7 @@ -behaviour(gen_server). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([init/1, handle_call/3, diff --git a/src/leveled_imanifest.erl b/src/leveled_imanifest.erl index 4248beb..480c197 100644 --- a/src/leveled_imanifest.erl +++ b/src/leveled_imanifest.erl @@ -1,11 +1,8 @@ %% -------- Inker Manifest --------- %% - -module(leveled_imanifest). --include("include/leveled.hrl"). - -export([ generate_entry/1, add_entry/3, diff --git a/src/leveled_inker.erl b/src/leveled_inker.erl index bd8387b..6ad5ced 100644 --- a/src/leveled_inker.erl +++ b/src/leveled_inker.erl @@ -80,12 +80,11 @@ %% TODO: how to instruct the files to close is tbd %% - -module(leveled_inker). -behaviour(gen_server). 
--include("include/leveled.hrl"). +-include("leveled.hrl"). -export([init/1, handle_call/3, diff --git a/src/leveled_log.erl b/src/leveled_log.erl index dfdc43e..f698201 100644 --- a/src/leveled_log.erl +++ b/src/leveled_log.erl @@ -3,7 +3,7 @@ -module(leveled_log). --include("include/leveled.hrl"). +-include_lib("kernel/include/logger.hrl"). -export([log/2, log_timer/3, @@ -17,21 +17,17 @@ save/1, return_settings/0]). --ifdef(TEST). --export([format_time/1, log_prefix/5]). --endif. - -record(log_options, {log_level = info :: log_level(), forced_logs = [] :: [atom()], database_id = 0 :: non_neg_integer()}). --type log_level() :: debug | info | warn | error | critical. +-type log_level() :: debug | info | warning | error | critical. -type log_options() :: #log_options{}. -export_type([log_options/0, log_level/0]). --define(LOG_LEVELS, [debug, info, warn, error, critical]). +-define(LOG_LEVELS, [debug, info, warning, error, critical]). -define(DEFAULT_LOG_LEVEL, error). -define(LOGBASE, @@ -49,7 +45,7 @@ b0003 => {info, <<"Bookie closing for reason ~w">>}, b0004 => - {warn, <<"Bookie snapshot exiting as master store ~w is down for reason ~p">>}, + {warning, <<"Bookie snapshot exiting as master store ~w is down for reason ~p">>}, b0005 => {info, <<"LedgerSQN=~w at startup">>}, b0006 => @@ -59,9 +55,9 @@ b0009 => {debug, <<"Bucket list finds Bucket ~w">>}, b0011 => - {warn, <<"Call to destroy the store and so all files to be removed">>}, + {warning, <<"Call to destroy the store and so all files to be removed">>}, b0013 => - {warn, <<"Long running task took ~w microseconds with task_type=~w">>}, + {warning, <<"Long running task took ~w microseconds with task_type=~w">>}, b0015 => {info, <<"Put timing with sample_count=~w ink_time=~w prep_time=~w mem_time=~w with total_object_size=~w with sample_period=~w seconds">>}, b0016 => @@ -71,9 +67,9 @@ b0018 => {info, <<"Positive HEAD responses timed with sample_count=~w and cache_count=~w found_count=~w fetch_ledger_time=~w fetch_ledgercache_time=~w rsp_time=~w notfound_time=~w with sample_period=~w seconds">>}, b0019 => - {warn, <<"Use of book_indexfold with constraint of Bucket ~w with no StartKey is deprecated">>}, + {warning, <<"Use of book_indexfold with constraint of Bucket ~w with no StartKey is deprecated">>}, b0020 => - {warn, <<"Ratio of penciller cache size ~w to bookie's memory cache size ~w is larger than expected">>}, + {warning, <<"Ratio of penciller cache size ~w to bookie's memory cache size ~w is larger than expected">>}, r0001 => {debug, <<"Object fold to process batch of ~w objects">>}, p0001 => @@ -113,7 +109,7 @@ p0029 => {info, <<"L0 completion confirmed and will transition to not pending">>}, p0030 => - {warn, <<"We're doomed - intention recorded to destroy all files">>}, + {warning, <<"We're doomed - intention recorded to destroy all files">>}, p0031 => {info, <<"Completion of update to levelzero with cache_size=~w level0_due=~w change_pending=~w MinSQN=~w MaxSQN=~w">>}, p0032 => @@ -151,7 +147,7 @@ pc012 => {debug, <<"File to be created as part of MSN=~w Filename=~s IsBasement=~w">>}, pc013 => - {warn, <<"Merge resulted in empty file ~s">>}, + {warning, <<"Merge resulted in empty file ~s">>}, pc015 => {info, <<"File created">>}, pc016 => @@ -179,7 +175,7 @@ sst04 => {debug, <<"Exit called for reason ~w on filename ~s">>}, sst05 => - {warn, <<"Rename rogue filename ~s to ~s">>}, + {warning, <<"Rename rogue filename ~s to ~s">>}, sst06 => {debug, <<"File ~s has been set for delete">>}, sst07 => @@ -187,7 +183,7 @@ sst08 => 
{info, <<"Completed creation of ~s at level ~w with max sqn ~w">>}, sst09 => - {warn, <<"Read request exposes slot with bad CRC">>}, + {warning, <<"Read request exposes slot with bad CRC">>}, sst10 => {debug, <<"Expansion sought to support pointer to pid ~w status ~w">>}, sst11 => @@ -233,7 +229,7 @@ i0017 => {debug, <<"At SQN=~w journal has filename ~s">>}, i0018 => - {warn, <<"We're doomed - intention recorded to destroy all files">>}, + {warning, <<"We're doomed - intention recorded to destroy all files">>}, i0020 => {info, <<"Journal backup completed to path=~s with file_count=~w">>}, i0021 => @@ -245,7 +241,7 @@ i0024 => {info, <<"Prompted roll at NewSQN=~w">>}, i0025 => - {warn, <<"Journal SQN of ~w is below Ledger SQN of ~w anti-entropy will be required">>}, + {warning, <<"Journal SQN of ~w is below Ledger SQN of ~w anti-entropy will be required">>}, i0026 => {info, <<"Deferring shutdown due to snapshot_count=~w">>}, i0027 => @@ -275,9 +271,9 @@ ic011 => {info, <<"Not clearing filename ~s as modified delta is only ~w seconds">>}, ic012 => - {warn, <<"Tag ~w not found in Strategy ~w - maybe corrupted">>}, + {warning, <<"Tag ~w not found in Strategy ~w - maybe corrupted">>}, ic013 => - {warn, "File with name ~s to be ignored in manifest as scanning for first key returned empty - maybe corrupted"}, + {warning, "File with name ~s to be ignored in manifest as scanning for first key returned empty - maybe corrupted"}, ic014 => {info, <<"Compaction to be run with strategy ~w and max_run_length ~w">>}, cdb01 => @@ -291,7 +287,7 @@ cdb05 => {info, <<"Closing of filename ~s from state ~w for reason ~w">>}, cdb06 => - {warn, <<"File to be truncated at last position of ~w with end of file at ~w">>}, + {warning, <<"File to be truncated at last position of ~w with end of file at ~w">>}, cdb07 => {info, <<"Hashtree index computed">>}, cdb08 => @@ -299,7 +295,7 @@ cdb09 => {info, <<"Failure to read Key/Value at Position ~w in scan this may be the end of the file">>}, cdb10 => - {warn, <<"CRC check failed due to error=~s">>}, + {warning, <<"CRC check failed due to error=~s">>}, cdb12 => {info, <<"Hashtree index written">>}, cdb13 => @@ -313,9 +309,9 @@ cdb19 => {info, <<"Sample timings in microseconds for sample_count=~w with totals of cycle_count=~w index_time=~w read_time=~w with sample_period=~w seconds">>}, cdb20 => - {warn, <<"Error ~w caught when safe reading a file to length ~w">>}, + {warning, <<"Error ~w caught when safe reading a file to length ~w">>}, cdb21 => - {warn, <<"File ~s to be deleted but already gone">>} + {warning, <<"File ~s to be deleted but already gone">>} }). @@ -375,8 +371,10 @@ get_opts() -> #log_options{} = LO -> LO; _ -> - #log_options{log_level = ?DEFAULT_LOG_LEVEL, - forced_logs = []} + #log_options{ + log_level = ?DEFAULT_LOG_LEVEL, + forced_logs = [] + } end. -spec return_settings() -> {log_level(), list(string())}. @@ -401,10 +399,14 @@ log(LogRef, Subs, SupportedLogLevels) -> true -> DBid = LogOpts#log_options.database_id, Prefix = - log_prefix( - localtime_ms(), LogLevel, LogRef, DBid, self()), + log_prefix(LogRef, DBid, self()), Suffix = <<"~n">>, - io:format(iolist_to_binary([Prefix, Log, Suffix]), Subs); + ?LOG( + LogLevel, + unicode:characters_to_list([Prefix, Log, Suffix]), + Subs, + #{log_type => backend} + ); false -> ok end. 
@@ -437,13 +439,15 @@ log_timer(LogRef, Subs, StartTime, SupportedLevels) -> true -> DBid = LogOpts#log_options.database_id, Prefix = - log_prefix( - localtime_ms(), LogLevel, LogRef, DBid, self()), + log_prefix(LogRef, DBid, self()), Suffix = <<"~n">>, Duration = duration_text(StartTime), - io:format( - iolist_to_binary([Prefix, Log, Duration, Suffix]), - Subs); + ?LOG( + LogLevel, + unicode:characters_to_list([Prefix, Log, Duration, Suffix]), + Subs, + #{log_type => backend} + ); false -> ok end. @@ -458,30 +462,11 @@ log_randomtimer(LogReference, Subs, StartTime, RandomProb) -> ok end. -localtime_ms() -> - {_, _, Micro} = Now = os:timestamp(), - {Date, {Hours, Minutes, Seconds}} = calendar:now_to_local_time(Now), - {Date, {Hours, Minutes, Seconds, Micro div 1000 rem 1000}}. - --spec log_prefix( - tuple(), atom(), atom(), non_neg_integer(), pid()) -> io_lib:chars(). -log_prefix({{Y, M, D}, {H, Mi, S, Ms}}, LogLevel, LogRef, DBid, Pid) -> - [integer_to_list(Y), $-, i2l(M), $-, i2l(D), - $T, i2l(H), $:, i2l(Mi), $:, i2l(S), $., i3l(Ms), - " log_level=", atom_to_list(LogLevel), " log_ref=", atom_to_list(LogRef), - " db_id=", integer_to_list(DBid), " pid=", pid_to_list(Pid), " "]. - --spec i2l(non_neg_integer()) -> list(). -i2l(I) when I < 10 -> - [$0, $0+I]; -i2l(I) -> - integer_to_list(I). - --spec i3l(non_neg_integer()) -> list(). -i3l(I) when I < 100 -> - [$0 | i2l(I)]; -i3l(I) -> - integer_to_list(I). +-spec log_prefix(atom(), non_neg_integer(), pid()) -> io_lib:chars(). +log_prefix(LogRef, DBid, Pid) -> + ["log_ref=", atom_to_list(LogRef), + " db_id=", integer_to_list(DBid), + " pid=", pid_to_list(Pid), " "]. -spec duration_text(erlang:timestamp()) -> io_lib:chars(). duration_text(StartTime) -> @@ -503,31 +488,14 @@ duration_text(StartTime) -> should_i_log(LogLevel, Levels, LogRef) -> should_i_log(LogLevel, Levels, LogRef, get_opts()). - -format_time({{Y, M, D}, {H, Mi, S, Ms}}) -> - io_lib:format("~b-~2..0b-~2..0b", [Y, M, D]) ++ "T" ++ - io_lib:format("~2..0b:~2..0b:~2..0b.~3..0b", [H, Mi, S, Ms]). - -prefix_compare_test() -> - Time = localtime_ms(), - DBid = 64, - LogLevel = info, - LogRef = b0001, - {TS0, OldTS} = - timer:tc(?MODULE, format_time, [Time]), - {TS1, NewPrefix} = - timer:tc(?MODULE, log_prefix, [Time, LogLevel, LogRef, DBid, self()]), - {NewTS, _Rest} = lists:split(23, lists:flatten(NewPrefix)), - ?assertMatch(OldTS, NewTS), - io:format(user, "~nTimestamp timings old ~w new ~w~n", [TS0, TS1]). - + log_test() -> log(d0001, []), log_timer(d0001, [], os:timestamp()). -log_warn_test() -> - ok = log(g0001, [], [warn, error]), - ok = log_timer(g0001, [], os:timestamp(), [warn, error]). +log_warning_test() -> + ok = log(g0001, [], [warning, error]), + ok = log_timer(g0001, [], os:timestamp(), [warning, error]). shouldilog_test() -> ok = set_loglevel(debug), @@ -547,41 +515,5 @@ badloglevel_test() -> ?assertMatch(true, is_active_level(?LOG_LEVELS, debug, unsupported)), ?assertMatch(true, is_active_level(?LOG_LEVELS, critical, unsupported)). -timing_test() -> - % Timing test - % Previous LOGBASE used list with string-based keys and values - % The size of the LOGBASE was 19,342 words (>150KB), and logs took - % o(100) microseconds. 
- % Changing the LOGBASE ot a map with binary-based keys and values does not - % appear to improve the speed of logging, but does reduce the size of the - % LOGBASE to just over 2,000 words (so an order of magnitude improvement) - timer:sleep(10), - io:format(user, "Log timings:~n", []), - io:format(user, "Logbase size ~w~n", [erts_debug:flat_size(?LOGBASE)]), - io:format( - user, - "Front log timing ~p~n", - [timer:tc(fun() -> log(cdb21, ["test_file"]) end)] - ), - io:format( - user, - "Mid log timing ~p~n", - [timer:tc(fun() -> log(pc013, ["test_file"]) end)] - ), - io:format( - user, - "End log timing ~p~n", - [timer:tc(fun() -> log(b0003, ["testing"]) end)] - ), - io:format( - user, - "Big log timing ~p~n", - [timer:tc(fun() -> log(sst13, [100,100,100,100,true,1]) end)] - ), - io:format( - user, - "Timer log timing ~p~n", - [timer:tc(fun() -> log_timer(pc015, [], os:timestamp()) end)] - ). -endif. diff --git a/src/leveled_pclerk.erl b/src/leveled_pclerk.erl index 50786e4..dcc3812 100644 --- a/src/leveled_pclerk.erl +++ b/src/leveled_pclerk.erl @@ -18,12 +18,11 @@ %% garbage is considered acceptable. %% - -module(leveled_pclerk). -behaviour(gen_server). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([ init/1, diff --git a/src/leveled_penciller.erl b/src/leveled_penciller.erl index cbcfa8b..64c325a 100644 --- a/src/leveled_penciller.erl +++ b/src/leveled_penciller.erl @@ -154,7 +154,6 @@ %% the current level zero in-memory view. %% - -module(leveled_penciller). -behaviour(gen_server). diff --git a/src/leveled_pmanifest.erl b/src/leveled_pmanifest.erl index bfbd086..35f437a 100644 --- a/src/leveled_pmanifest.erl +++ b/src/leveled_pmanifest.erl @@ -14,10 +14,9 @@ %% access the cache) %% - Use a skiplist like enhanced list at lower levels. - -module(leveled_pmanifest). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([ new_manifest/0, diff --git a/src/leveled_pmem.erl b/src/leveled_pmem.erl index 2f5bb88..ce7b9e4 100644 --- a/src/leveled_pmem.erl +++ b/src/leveled_pmem.erl @@ -26,7 +26,7 @@ -module(leveled_pmem). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([ prepare_for_index/2, diff --git a/src/leveled_runner.erl b/src/leveled_runner.erl index a9d337a..32fe5f1 100644 --- a/src/leveled_runner.erl +++ b/src/leveled_runner.erl @@ -16,10 +16,9 @@ %% check that the item is available in the Journal via the Inker as part of %% the fold. This may be useful for anti-entropy folds - -module(leveled_runner). --include("include/leveled.hrl"). +-include("leveled.hrl"). -export([ bucket_sizestats/3, diff --git a/src/leveled_sst.erl b/src/leveled_sst.erl index d82cdaf..5647274 100644 --- a/src/leveled_sst.erl +++ b/src/leveled_sst.erl @@ -61,7 +61,7 @@ -behaviour(gen_statem). --include("include/leveled.hrl"). +-include("leveled.hrl"). -define(LOOK_SLOTSIZE, 128). % Maximum of 128 -define(LOOK_BLOCKSIZE, {24, 32}). % 4x + y = ?LOOK_SLOTSIZE diff --git a/src/leveled_tictac.erl b/src/leveled_tictac.erl index c65053d..de30d79 100644 --- a/src/leveled_tictac.erl +++ b/src/leveled_tictac.erl @@ -49,7 +49,6 @@ %% version of the segment-leaf hash from the previous level 1 hash). %% - -module(leveled_tictac). -export([ diff --git a/src/leveled_tree.erl b/src/leveled_tree.erl index 9435e9f..d824c9f 100644 --- a/src/leveled_tree.erl +++ b/src/leveled_tree.erl @@ -10,7 +10,7 @@ -module(leveled_tree). --include("include/leveled.hrl"). +-include("leveled.hrl"). 
-export([ from_orderedlist/2, diff --git a/src/leveled_util.erl b/src/leveled_util.erl index 9e622ef..ac8c439 100644 --- a/src/leveled_util.erl +++ b/src/leveled_util.erl @@ -17,7 +17,6 @@ -define(WRITE_OPS, [binary, raw, read, write]). - -spec generate_uuid() -> list(). %% @doc %% Generate a new globally unique ID as a string. diff --git a/test/end_to_end/appdefined_SUITE.erl b/test/end_to_end/appdefined_SUITE.erl index 9a8cf39..4c508a0 100644 --- a/test/end_to_end/appdefined_SUITE.erl +++ b/test/end_to_end/appdefined_SUITE.erl @@ -1,7 +1,6 @@ -module(appdefined_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). --export([all/0]). +-include("leveled.hrl"). +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([ application_defined_tag/1, bespoketag_recalc/1 @@ -12,7 +11,12 @@ all() -> [ bespoketag_recalc ]. +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "appdefined"}|Config]), + Config. +end_per_suite(Config) -> + testutil:end_per_suite(Config). application_defined_tag(_Config) -> T1 = os:timestamp(), diff --git a/test/end_to_end/basic_SUITE.erl b/test/end_to_end/basic_SUITE.erl index a166b2a..22c7b91 100644 --- a/test/end_to_end/basic_SUITE.erl +++ b/test/end_to_end/basic_SUITE.erl @@ -1,7 +1,6 @@ -module(basic_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). --export([all/0]). +-include("leveled.hrl"). +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([simple_put_fetch_head_delete/1, many_put_fetch_head/1, journal_compaction/1, @@ -36,6 +35,13 @@ all() -> [ ]. +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "basic"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). + simple_put_fetch_head_delete(_Config) -> io:format("simple test with info and no forced logs~n"), simple_test_withlog(info, []), diff --git a/test/end_to_end/iterator_SUITE.erl b/test/end_to_end/iterator_SUITE.erl index ef5efdf..15236f6 100644 --- a/test/end_to_end/iterator_SUITE.erl +++ b/test/end_to_end/iterator_SUITE.erl @@ -1,11 +1,10 @@ -module(iterator_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). +-include("leveled.hrl"). -define(KEY_ONLY, {false, undefined}). --export([all/0]). +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([expiring_indexes/1, breaking_folds/1, single_object_with2i/1, @@ -26,6 +25,12 @@ all() -> [ foldobjects_bybucket_range ]. +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "iterator"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). expiring_indexes(_Config) -> % Add objects to the store with index entries, where the objects (and hence diff --git a/test/end_to_end/perf_SUITE.erl b/test/end_to_end/perf_SUITE.erl index bdd7dd8..c4d40f3 100644 --- a/test/end_to_end/perf_SUITE.erl +++ b/test/end_to_end/perf_SUITE.erl @@ -1,7 +1,9 @@ -module(perf_SUITE). --include("../include/leveled.hrl"). + +-include("leveled.hrl"). + -define(INFO, info). --export([all/0, suite/0]). +-export([all/0, suite/0, init_per_suite/1, end_per_suite/1]). -export([ riak_ctperf/1, riak_fullperf/1, riak_profileperf/1, riak_miniperf/1 ]). @@ -25,6 +27,13 @@ all() -> [?performance]. suite() -> [{timetrap, {hours, 16}}]. +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "perf"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). + riak_fullperf(_Config) -> riak_fullperf(2048, zstd, as_store). 
diff --git a/test/end_to_end/recovery_SUITE.erl b/test/end_to_end/recovery_SUITE.erl index 8a74ac1..5b35970 100644 --- a/test/end_to_end/recovery_SUITE.erl +++ b/test/end_to_end/recovery_SUITE.erl @@ -1,7 +1,8 @@ -module(recovery_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). --export([all/0]). + +-include("leveled.hrl"). + +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([ recovery_with_samekeyupdates/1, same_key_rotation_withindexes/1, @@ -38,6 +39,12 @@ all() -> [ replace_everything ]. +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "recovery"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). replace_everything(_Config) -> % See https://github.com/martinsumner/leveled/issues/389 diff --git a/test/end_to_end/riak_SUITE.erl b/test/end_to_end/riak_SUITE.erl index 9041983..b2c84d1 100644 --- a/test/end_to_end/riak_SUITE.erl +++ b/test/end_to_end/riak_SUITE.erl @@ -1,7 +1,8 @@ -module(riak_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). --export([all/0]). + +-include("leveled.hrl"). + +-export([all/0, init_per_suite/1, end_per_suite/1]). -export([ basic_riak/1, fetchclocks_modifiedbetween/1, @@ -26,6 +27,12 @@ all() -> [ -define(MAGIC, 53). % riak_kv -> riak_object +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "riak"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). basic_riak(_Config) -> basic_riak_tester(<<"B0">>, 640000), @@ -65,38 +72,48 @@ basic_riak_tester(Bucket, KeyCount) -> CountPerList = KeyCount div 5, ObjList1 = - testutil:generate_objects(CountPerList, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(512), - IndexGenFun(1), - Bucket), + testutil:generate_objects( + CountPerList, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(512), + IndexGenFun(1), + Bucket + ), ObjList2 = - testutil:generate_objects(CountPerList, - {fixed_binary, CountPerList + 1}, [], - leveled_rand:rand_bytes(512), - IndexGenFun(2), - Bucket), + testutil:generate_objects( + CountPerList, + {fixed_binary, CountPerList + 1}, [], + leveled_rand:rand_bytes(512), + IndexGenFun(2), + Bucket + ), ObjList3 = - testutil:generate_objects(CountPerList, - {fixed_binary, 2 * CountPerList + 1}, [], - leveled_rand:rand_bytes(512), - IndexGenFun(3), - Bucket), + testutil:generate_objects( + CountPerList, + {fixed_binary, 2 * CountPerList + 1}, [], + leveled_rand:rand_bytes(512), + IndexGenFun(3), + Bucket + ), ObjList4 = - testutil:generate_objects(CountPerList, - {fixed_binary, 3 * CountPerList + 1}, [], - leveled_rand:rand_bytes(512), - IndexGenFun(4), - Bucket), + testutil:generate_objects( + CountPerList, + {fixed_binary, 3 * CountPerList + 1}, [], + leveled_rand:rand_bytes(512), + IndexGenFun(4), + Bucket + ), ObjList5 = - testutil:generate_objects(CountPerList, - {fixed_binary, 4 * CountPerList + 1}, [], - leveled_rand:rand_bytes(512), - IndexGenFun(5), - Bucket), + testutil:generate_objects( + CountPerList, + {fixed_binary, 4 * CountPerList + 1}, [], + leveled_rand:rand_bytes(512), + IndexGenFun(5), + Bucket + ), % Mix with the ordering on the load, just in case ordering hides issues testutil:riakload(Bookie1, ObjList4), @@ -124,13 +141,13 @@ basic_riak_tester(Bucket, KeyCount) -> ID = integer_to_list(Idx), Index = list_to_binary("integer" ++ ID ++ "_int"), {async, R} = - leveled_bookie:book_indexfold(Book, - {Bucket, <<>>}, - {FoldKeysFun, []}, - {Index, - IC, - IC}, - {true, undefined}), + leveled_bookie:book_indexfold( + 
Book, + {Bucket, <<>>}, + {FoldKeysFun, []}, + {Index, IC, IC}, + {true, undefined} + ), KTL = R(), CountAcc + length(KTL) end @@ -141,13 +158,13 @@ ID = integer_to_list(Idx), Index = list_to_binary("binary" ++ ID ++ "_bin"), {async, R} = - leveled_bookie:book_indexfold(Book, - {Bucket, <<>>}, - {FoldKeysFun, []}, - {Index, - <<IC:32/integer>>, - <<IC:32/integer>>}, - {true, undefined}), + leveled_bookie:book_indexfold( + Book, + {Bucket, <<>>}, + {FoldKeysFun, []}, + {Index, <<IC:32/integer>>, <<IC:32/integer>>}, + {true, undefined} + ), KTL = R(), CountAcc + length(KTL) end @@ -215,11 +232,13 @@ [Obj1, Obj2, Obj3, Obj4, Obj5, Obj2L]), {async, HeadR} = - leveled_bookie:book_headfold(Bookie2, - ?RIAK_TAG, - {HeadFoldFun, []}, - true, false, - SegList), + leveled_bookie:book_headfold( + Bookie2, + ?RIAK_TAG, + {HeadFoldFun, []}, + true, false, + SegList + ), SW_SL0 = os:timestamp(), KLBySeg = HeadR(), io:format("SegList Headfold returned ~w heads in ~w ms~n", @@ -230,12 +249,13 @@ true = length(KLBySeg) - length(KLBySegRem) == length(BKList), {async, HeadRFalsePositive} = - leveled_bookie:book_headfold(Bookie2, - ?RIAK_TAG, - {HeadFoldFun, []}, - true, false, - SegList ++ lists:seq(1, 256)), - % Make it a large seg list + leveled_bookie:book_headfold( + Bookie2, + ?RIAK_TAG, + {HeadFoldFun, []}, + true, false, + SegList ++ lists:seq(1, 256) + ), % Make it a large seg list SW_SL1 = os:timestamp(), KLByXcessSeg = HeadRFalsePositive(), io:format("SegList Headfold with xcess segments returned ~w heads in ~w ms~n", @@ -452,74 +472,88 @@ {ok, Bookie1B} = leveled_bookie:book_start(StartOpts1B), ObjList0 = - testutil:generate_objects(100000, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(32), - fun() -> [] end, - <<"BaselineB">>), + testutil:generate_objects( + 100000, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(32), + fun() -> [] end, + <<"BaselineB">> + ), ObjL1StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList1 = - testutil:generate_objects(20000, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B0">>), + testutil:generate_objects( + 20000, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B0">> + ), timer:sleep(1000), ObjL1EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), _ObjL2StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList2 = - testutil:generate_objects(15000, - {fixed_binary, 20001}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B0">>), + testutil:generate_objects( + 15000, + {fixed_binary, 20001}, [], + leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B0">> + ), timer:sleep(1000), _ObjList2EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), ObjL3StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList3 = - testutil:generate_objects(35000, - {fixed_binary, 35001}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B0">>), + testutil:generate_objects( + 35000, + {fixed_binary, 35001}, [], + leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B0">> + ), timer:sleep(1000), ObjL3EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), ObjL4StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList4 = - testutil:generate_objects(30000, - {fixed_binary, 70001}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B0">>), + testutil:generate_objects( + 30000, + {fixed_binary, 70001}, [], + 
leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B0">> + ), timer:sleep(1000), _ObjL4EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), ObjL5StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList5 = - testutil:generate_objects(8000, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B1">>), + testutil:generate_objects( + 8000, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B1">> + ), timer:sleep(1000), _ObjL5EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), ObjL6StartTS = testutil:convert_to_seconds(os:timestamp()), ObjList6 = - testutil:generate_objects(7000, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(512), - fun() -> [] end, - <<"B2">>), + testutil:generate_objects( + 7000, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(512), + fun() -> [] end, + <<"B2">> + ), timer:sleep(1000), ObjL6EndTS = testutil:convert_to_seconds(os:timestamp()), timer:sleep(1000), @@ -561,16 +595,17 @@ fun(FoldTarget, ModRange, EndNumber, MaxCount) -> fun(_I, {LKN, KC}) -> {async, Runner} = - leveled_bookie:book_headfold(FoldTarget, - ?RIAK_TAG, - KeyRangeFun(LKN + 1, - EndNumber), - {StoreFoldFun, {LKN, KC}}, - false, - true, - false, - ModRange, - MaxCount), + leveled_bookie:book_headfold( + FoldTarget, + ?RIAK_TAG, + KeyRangeFun(LKN + 1, EndNumber), + {StoreFoldFun, {LKN, KC}}, + false, + true, + false, + ModRange, + MaxCount + ), {_, {LKN0, KC0}} = Runner(), {LKN0, KC0} end @@ -640,15 +675,15 @@ NoFilterStart = os:timestamp(), {async, R3A_NoFilterRunner} = - leveled_bookie:book_headfold(Bookie1A, - ?RIAK_TAG, - KeyRangeFun(1, 100000), - {CrudeStoreFoldFun(ObjL1StartTS, - ObjL1EndTS), - {0, 0}}, - false, - true, - false), + leveled_bookie:book_headfold( + Bookie1A, + ?RIAK_TAG, + KeyRangeFun(1, 100000), + {CrudeStoreFoldFun(ObjL1StartTS, ObjL1EndTS), {0, 0}}, + false, + true, + false + ), R3A_NoFilter = R3A_NoFilterRunner(), NoFilterTime = timer:now_diff(os:timestamp(), NoFilterStart) div 1000, io:format("R3A_NoFilter ~w~n", [R3A_NoFilter]), @@ -668,88 +703,99 @@ end, {async, R4A_MultiBucketRunner} = - leveled_bookie:book_headfold(Bookie1A, - ?RIAK_TAG, - {bucket_list, [<<"B0">>, <<"B2">>]}, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - % Range includes ObjjL5 LMDs, - % but these ar enot in bucket list - false), + leveled_bookie:book_headfold( + Bookie1A, + ?RIAK_TAG, + {bucket_list, [<<"B0">>, <<"B2">>]}, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + % Range includes ObjL5 LMDs, + % but these are not in bucket list + false + ), R4A_MultiBucket = R4A_MultiBucketRunner(), io:format("R4A_MultiBucket ~w ~n", [R4A_MultiBucket]), true = R4A_MultiBucket == 37000, {async, R5A_MultiBucketRunner} = - leveled_bookie:book_headfold(Bookie1A, - ?RIAK_TAG, - {bucket_list, [<<"B2">>, <<"B0">>]}, - % Reverse the buckets in the bucket - % list - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1A, + ?RIAK_TAG, + {bucket_list, [<<"B2">>, <<"B0">>]}, + % Reverse the buckets in the bucket + % list + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), R5A_MultiBucket = R5A_MultiBucketRunner(), 
io:format("R5A_MultiBucket ~w ~n", [R5A_MultiBucket]), true = R5A_MultiBucket == 37000, {async, R5B_MultiBucketRunner} = - leveled_bookie:book_headfold(Bookie1B, - ?RIAK_TAG, - {bucket_list, - [<<"BaselineB">>, <<"B2">>, <<"B0">>]}, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1B, + ?RIAK_TAG, + {bucket_list, [<<"BaselineB">>, <<"B2">>, <<"B0">>]}, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), R5B_MultiBucket = R5B_MultiBucketRunner(), io:format("R5B_MultiBucket ~w ~n", [R5B_MultiBucket]), true = R5B_MultiBucket == 37000, testutil:update_some_objects(Bookie1A, ObjList1, 1000), - R6A_PlusFilter = lists:foldl(FoldRangesFun(Bookie1A, - {ObjL1StartTS, ObjL1EndTS}, - 100000, - 100000), - {0, 0}, lists:seq(1, 1)), + R6A_PlusFilter = + lists:foldl( + FoldRangesFun( + Bookie1A, {ObjL1StartTS, ObjL1EndTS}, 100000, 100000 + ), + {0, 0}, + lists:seq(1, 1)), io:format("R6A_PlusFilter ~w~n", [R6A_PlusFilter]), true = 19000 == element(2, R6A_PlusFilter), % Hit limit of max count before trying next bucket, with and without a % timestamp filter {async, R7A_MultiBucketRunner} = - leveled_bookie:book_headfold(Bookie1A, - ?RIAK_TAG, - {bucket_list, [<<"B1">>, <<"B2">>]}, - {SimpleCountFun([<<"B1">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL5StartTS, ObjL6EndTS}, - 5000), + leveled_bookie:book_headfold( + Bookie1A, + ?RIAK_TAG, + {bucket_list, [<<"B1">>, <<"B2">>]}, + {SimpleCountFun([<<"B1">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL5StartTS, ObjL6EndTS}, + 5000 + ), R7A_MultiBucket = R7A_MultiBucketRunner(), io:format("R7A_MultiBucket ~w ~n", [R7A_MultiBucket]), true = R7A_MultiBucket == {0, 5000}, {async, R8A_MultiBucketRunner} = - leveled_bookie:book_headfold(Bookie1A, - ?RIAK_TAG, - {bucket_list, [<<"B1">>, <<"B2">>]}, - {SimpleCountFun([<<"B1">>, <<"B2">>]), 0}, - false, - true, - false, - false, - 5000), + leveled_bookie:book_headfold( + Bookie1A, + ?RIAK_TAG, + {bucket_list, [<<"B1">>, <<"B2">>]}, + {SimpleCountFun([<<"B1">>, <<"B2">>]), 0}, + false, + true, + false, + false, + 5000 + ), R8A_MultiBucket = R8A_MultiBucketRunner(), io:format("R8A_MultiBucket ~w ~n", [R8A_MultiBucket]), true = R8A_MultiBucket == {0, 5000}, @@ -766,11 +812,13 @@ fetchclocks_modifiedbetween(_Config) -> io:format("Push tested keys down levels with new objects~n"), ObjList7 = - testutil:generate_objects(200000, - {fixed_binary, 1}, [], - leveled_rand:rand_bytes(32), - fun() -> [] end, - <<"B1.9">>), + testutil:generate_objects( + 200000, + {fixed_binary, 1}, [], + leveled_rand:rand_bytes(32), + fun() -> [] end, + <<"B1.9">> + ), testutil:riakload(Bookie1BS, ObjList7), lmdrange_tester(Bookie1BS, SimpleCountFun, @@ -794,28 +842,32 @@ time_filtered_query(FoldRangesFun, Bookie, ObjL1StartTS, ObjL1EndTS) -> lmdrange_tester(Bookie1BS, SimpleCountFun, ObjL4StartTS, ObjL6StartTS, ObjL6EndTS, TooLate) -> {async, R5B_MultiBucketRunner0} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - all, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + all, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), R5B_MultiBucket0 = R5B_MultiBucketRunner0(), io:format("R5B_MultiBucket0 ~w ~n", [R5B_MultiBucket0]), true = R5B_MultiBucket0 == 37000, {async, 
R5B_MultiBucketRunner1} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - all, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + all, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), R5B_MultiBucket1 = R5B_MultiBucketRunner1(), io:format("R5B_MultiBucket1 ~w ~n", [R5B_MultiBucket1]), true = R5B_MultiBucket1 == 37000, @@ -829,40 +881,46 @@ lmdrange_tester(Bookie1BS, SimpleCountFun, end end, {async, R5B_MultiBucketRunner2} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - {bucket_list, [<<"B0">>, <<"B2">>]}, - {SimpleMinMaxFun, []}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + {bucket_list, [<<"B0">>, <<"B2">>]}, + {SimpleMinMaxFun, []}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), [{<<"B0">>, MinB0K, MaxB0K}, {<<"B2">>, MinB2K, MaxB2K}] = R5B_MultiBucketRunner2(), io:format("Found Min and Max Keys~n"), io:format("B ~s MinK ~s MaxK ~s~n", [<<"B0">>, MinB0K, MaxB0K]), io:format("B ~s MinK ~s MaxK ~s~n", [<<"B2">>, MinB2K, MaxB2K]), {async, R5B_MultiBucketRunner3a} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - {range, <<"B0">>, {MinB0K, MaxB0K}}, - {SimpleCountFun([<<"B0">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + {range, <<"B0">>, {MinB0K, MaxB0K}}, + {SimpleCountFun([<<"B0">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), {async, R5B_MultiBucketRunner3b} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - {range, <<"B2">>, {MinB2K, MaxB2K}}, - {SimpleCountFun([<<"B2">>]), 0}, - false, - true, - false, - {ObjL4StartTS, ObjL6EndTS}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + {range, <<"B2">>, {MinB2K, MaxB2K}}, + {SimpleCountFun([<<"B2">>]), 0}, + false, + true, + false, + {ObjL4StartTS, ObjL6EndTS}, + false + ), R5B_MultiBucket3a = R5B_MultiBucketRunner3a(), io:format("R5B_MultiBucket3a ~w ~n", [R5B_MultiBucket3a]), R5B_MultiBucket3b = R5B_MultiBucketRunner3b(), @@ -871,32 +929,33 @@ lmdrange_tester(Bookie1BS, SimpleCountFun, io:format("Query outside of time range~n"), {async, R5B_MultiBucketRunner4} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - all, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL6EndTS, - TooLate}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + all, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL6EndTS, TooLate}, + false + ), R5B_MultiBucket4 = R5B_MultiBucketRunner4(), io:format("R5B_MultiBucket4 ~w ~n", [R5B_MultiBucket4]), true = R5B_MultiBucket4 == 0, io:format("Query with one foot inside of time range~n"), {async, R5B_MultiBucketRunner5} = - leveled_bookie:book_headfold(Bookie1BS, - ?RIAK_TAG, - all, - {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, - false, - true, - false, - {ObjL6StartTS, - TooLate}, - false), + leveled_bookie:book_headfold( + Bookie1BS, + ?RIAK_TAG, + all, + {SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, + false, + true, + false, + {ObjL6StartTS, TooLate}, + false), R5B_MultiBucket5 = R5B_MultiBucketRunner5(), io:format("R5B_MultiBucket5 ~w ~n", [R5B_MultiBucket5]), true = R5B_MultiBucket5 == 7000. 
@@ -1310,11 +1369,13 @@ handoff(_Config) -> % Handoff the data from the first store to the other three stores {async, Handoff2} = - leveled_bookie:book_objectfold(Bookie1, - ?RIAK_TAG, - {FoldObjectsFun(Bookie2), ok}, - false, - key_order), + leveled_bookie:book_objectfold( + Bookie1, + ?RIAK_TAG, + {FoldObjectsFun(Bookie2), ok}, + false, + key_order + ), SW2 = os:timestamp(), ok = Handoff2(), Time_HO2 = timer:now_diff(os:timestamp(), SW2)/1000, @@ -1322,22 +1383,26 @@ handoff(_Config) -> [Time_HO2]), SW3 = os:timestamp(), {async, Handoff3} = - leveled_bookie:book_objectfold(Bookie1, - ?RIAK_TAG, - {FoldObjectsFun(Bookie3), ok}, - true, - sqn_order), + leveled_bookie:book_objectfold( + Bookie1, + ?RIAK_TAG, + {FoldObjectsFun(Bookie3), ok}, + true, + sqn_order + ), ok = Handoff3(), Time_HO3 = timer:now_diff(os:timestamp(), SW3)/1000, io:format("Handoff to Book3 in sqn_order took ~w milliseconds ~n", [Time_HO3]), SW4 = os:timestamp(), {async, Handoff4} = - leveled_bookie:book_objectfold(Bookie1, - ?RIAK_TAG, - {FoldObjectsFun(Bookie4), ok}, - true, - sqn_order), + leveled_bookie:book_objectfold( + Bookie1, + ?RIAK_TAG, + {FoldObjectsFun(Bookie4), ok}, + true, + sqn_order + ), ok = Handoff4(), Time_HO4 = timer:now_diff(os:timestamp(), SW4)/1000, diff --git a/test/end_to_end/testutil.erl b/test/end_to_end/testutil.erl index 611f20f..82c0235 100644 --- a/test/end_to_end/testutil.erl +++ b/test/end_to_end/testutil.erl @@ -2,6 +2,8 @@ -include("../include/leveled.hrl"). +-export([init_per_suite/1, end_per_suite/1]). + -export([book_riakput/3, book_tempriakput/4, book_riakdelete/4, @@ -91,6 +93,59 @@ updatemetadata=dict:store(clean, true, dict:new()), updatevalue :: term()}). + +init_per_suite(Config) -> + LogTemplate = [time, " log_level=", level, " ", msg, "\n"], + LogFormatter = + { + logger_formatter, + #{ + time_designator => $\s, + template => LogTemplate + } + }, + {suite, SUITEName} = lists:keyfind(suite, 1, Config), + FileName = "leveled_" ++ SUITEName ++ "_ct.log", + LogConfig = + #{ + config => + #{ + file => FileName, + max_no_files => 5 + } + }, + + LogFilter = + fun(LogEvent, LogType) -> + Meta = maps:get(meta, LogEvent), + case maps:get(log_type, Meta, not_found) of + LogType -> + LogEvent; + _ -> + ignore + end + end, + + ok = logger:add_handler(logfile, logger_std_h, LogConfig), + ok = logger:set_handler_config(logfile, formatter, LogFormatter), + ok = logger:set_handler_config(logfile, level, info), + ok = logger:add_handler_filter(logfile, type_filter, {LogFilter, backend}), + + ok = logger:set_handler_config(default, level, notice), + ok = logger:set_handler_config(cth_log_redirect, level, notice), + + ok = logger:set_primary_config(level, info), + + Config. + +end_per_suite(_Config) -> + ok = logger:remove_handler(logfile), + ok = logger:set_primary_config(level, notice), + ok = logger:set_handler_config(default, level, all), + ok = logger:set_handler_config(cth_log_redirect, level, all), + + ok. + riak_object(Bucket, Key, Value, MetaData) -> Content = #r_content{metadata=dict:from_list(MetaData), value=Value}, Obj = #r_object{bucket=Bucket, diff --git a/test/end_to_end/tictac_SUITE.erl b/test/end_to_end/tictac_SUITE.erl index 37b5e1a..6b38bba 100644 --- a/test/end_to_end/tictac_SUITE.erl +++ b/test/end_to_end/tictac_SUITE.erl @@ -1,7 +1,6 @@ -module(tictac_SUITE). --include_lib("common_test/include/ct.hrl"). --include("include/leveled.hrl"). --export([all/0]). +-include("leveled.hrl"). +-export([all/0, init_per_suite/1, end_per_suite/1]). 
-export([ many_put_compare/1, index_compare/1, @@ -16,11 +15,18 @@ all() -> [ tuplebuckets_headonly ]. --define(LMD_FORMAT, "~4..0w~2..0w~2..0w~2..0w~2..0w"). -define(V1_VERS, 1). -define(MAGIC, 53). % riak_kv -> riak_object +init_per_suite(Config) -> + testutil:init_per_suite([{suite, "tictac"}|Config]), + Config. + +end_per_suite(Config) -> + testutil:end_per_suite(Config). + many_put_compare(_Config) -> + TreeSize = small, SegmentCount = 256 * 256, % Test requires multiple different databases, so want to mount them all
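Note on the logger integration above: leveled_log now emits every event through the standard OTP logger, attaching log_type => backend to the event metadata (see the ?LOG calls in leveled_log.erl and the handler configured in testutil:init_per_suite/1). A minimal sketch of how a node embedding leveled might route just these events to a dedicated file handler follows; it is illustrative only, assuming nothing beyond the stock logger API, and the module name, handler id and log path are hypothetical rather than part of this changeset.

%% Minimal sketch, assuming only the standard OTP logger API.
%% The module name, handler id 'leveled_file' and log path are hypothetical.
-module(leveled_log_routing).
-export([attach/0]).

attach() ->
    %% File handler with rotation, mirroring the options used in testutil
    ok = logger:add_handler(
        leveled_file,
        logger_std_h,
        #{config => #{file => "log/leveled.log", max_no_files => 5}}
    ),
    %% Keep only events that leveled_log tagged with log_type => backend,
    %% dropping anything else that reaches this handler. An equivalent
    %% filter could instead match leveled_log as the first element of the
    %% mfa metadata, as the schema comment suggests.
    TypeFilter =
        fun(LogEvent = #{meta := Meta}, _Extra) ->
            case maps:get(log_type, Meta, not_found) of
                backend -> LogEvent;
                _ -> stop
            end
        end,
    ok = logger:add_handler_filter(leveled_file, backend_only, {TypeFilter, []}),
    ok = logger:set_handler_config(leveled_file, level, info).

Note that leveled's own leveled.log_level gate is applied before an event ever reaches logger, so handler levels and logger:set_module_level(leveled_log, Level) can only filter further; they cannot surface logs that leveled has already suppressed.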