Switch to logger (#442)

* Switch to logger

Use logger rather than io:format when logging.  The ct tests have been switched to log to file; testutil:init_per_suite/1 may offer useful guidance on configuring logger with leveled.
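
As an illustrative sketch only (the handler id, file name and template below are arbitrary, not part of this change), a node embedding leveled might direct these logs to a rotating file with a standard handler:

    ok = logger:add_handler(
        leveled_logfile,
        logger_std_h,
        #{
            % file handler config as used by the ct tests below
            config => #{file => "leveled.log", max_no_files => 5},
            level => info,
            formatter =>
                {logger_formatter,
                    #{template => [time, " log_level=", level, " ", msg, "\n"]}}
        }
    ),
    ok = logger:set_primary_config(level, info)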

As all logs are produced by the leveled_log module, the MFA metadata is uninteresting in log output, but it can be used for explicit filter controls for leveled logs.
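
For example, a handler filter can key on the module element of that metadata to pass only leveled's logs (a sketch; the handler and filter ids here are arbitrary):

    LeveledOnly =
        fun(LogEvent = #{meta := Meta}, _Extra) ->
            % every leveled log event carries {leveled_log, _, _} as mfa
            case maps:get(mfa, Meta, undefined) of
                {leveled_log, _Function, _Arity} -> LogEvent;
                _ -> stop
            end
        end,
    ok = logger:add_handler_filter(default, leveled_only, {LeveledOnly, []})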

* iolist_to_binary not unicode_binary()

logger filters will error, and be removed, if the format line is a binary().  It must be either a charlist() or a unicode_binary(), so iolist_to_binary/1 can't be used.
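
The format term is therefore built with unicode:characters_to_list/1 before being handed over - roughly as below (a sketch using the logger:log/4 function for brevity; the module itself uses the ?LOG macro so that mfa metadata is captured):

    % Prefix, Log and Subs as constructed in leveled_log:log/3
    Format = unicode:characters_to_list([Prefix, Log, <<"~n">>]),
    logger:log(LogLevel, Format, Subs, #{log_type => backend})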

* Add metadata for filter

* Update test/end_to_end/tictac_SUITE.erl

Co-authored-by: Thomas Arts <thomas.arts@quviq.com>

Martin Sumner 2024-09-06 11:18:24 +01:00 committed by GitHub
parent 5db277b82d
commit 54e3096020
27 changed files with 492 additions and 410 deletions


@@ -81,13 +81,18 @@
 ]}.
 
 %% @doc Log level
-%% Can be debug, info, warn, error or critical
+%% Can be debug, info, warning, error or critical
 %% Set the minimum log level to be used within leveled. Leveled will log many
 %% lines to allow for stats to be etracted by those using log indexers such as
-%% Splunk
+%% Splunk.
+%% Logging will be via erlang logger, and so the destination will be defined
+%% by the configured log handlers of the erlang node which starts the bookie.
+%% All logs are from the leveled_log module, and so specific handling can be
+%% managed using filters on the first element of the MFA metadata, or setting
+%% the log level for that specific module.
 {mapping, "leveled.log_level", "leveled.log_level", [
     {default, info},
-    {datatype, atom}
+    {datatype, {enum, [debug, info, warning, error, critical]}}
 ]}.
 
 %% @doc The approximate size (in bytes) when a Journal file should be rolled.


@@ -4,10 +4,10 @@
   {vsn, git},
   {registered, []},
   {applications, [
-    lz4,
-    zstd,
     kernel,
-    stdlib
+    stdlib,
+    lz4,
+    zstd
   ]},
   {maintainers, ["Martin Sumner"]},
   {licenses, ["Apache"]},


@@ -38,7 +38,7 @@
 -behaviour(gen_server).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([init/1,
          handle_call/3,


@@ -42,13 +42,11 @@
 %% The first word is the corresponding hash value and the second word is a
 %% file pointer to the actual {key,value} tuple higher in the file.
 %%
-%%
 -module(leveled_cdb).
 -behaviour(gen_statem).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([init/1,
          callback_mode/0,


@@ -8,7 +8,7 @@
 -module(leveled_codec).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     inker_reload_strategy/1,


@@ -16,7 +16,7 @@
 -module(leveled_head).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([key_to_canonicalbinary/1,
          build_head/2,


@@ -72,7 +72,7 @@
 -behaviour(gen_server).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([init/1,
          handle_call/3,


@@ -1,11 +1,8 @@
 %% -------- Inker Manifest ---------
 %%
 -module(leveled_imanifest).
--include("include/leveled.hrl").
 -export([
     generate_entry/1,
     add_entry/3,


@@ -80,12 +80,11 @@
 %% TODO: how to instruct the files to close is tbd
 %%
 -module(leveled_inker).
 -behaviour(gen_server).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([init/1,
          handle_call/3,


@@ -3,7 +3,7 @@
 -module(leveled_log).
 
--include("include/leveled.hrl").
+-include_lib("kernel/include/logger.hrl").
 
 -export([log/2,
          log_timer/3,
@@ -17,21 +17,17 @@
          save/1,
          return_settings/0]).
 
--ifdef(TEST).
--export([format_time/1, log_prefix/5]).
--endif.
-
 -record(log_options,
         {log_level = info :: log_level(),
          forced_logs = [] :: [atom()],
          database_id = 0 :: non_neg_integer()}).
 
--type log_level() :: debug | info | warn | error | critical.
+-type log_level() :: debug | info | warning | error | critical.
 -type log_options() :: #log_options{}.
 
 -export_type([log_options/0, log_level/0]).
 
--define(LOG_LEVELS, [debug, info, warn, error, critical]).
+-define(LOG_LEVELS, [debug, info, warning, error, critical]).
 -define(DEFAULT_LOG_LEVEL, error).
 
 -define(LOGBASE,
@@ -49,7 +45,7 @@
     b0003 =>
         {info, <<"Bookie closing for reason ~w">>},
     b0004 =>
-        {warn, <<"Bookie snapshot exiting as master store ~w is down for reason ~p">>},
+        {warning, <<"Bookie snapshot exiting as master store ~w is down for reason ~p">>},
     b0005 =>
         {info, <<"LedgerSQN=~w at startup">>},
     b0006 =>
@@ -59,9 +55,9 @@
     b0009 =>
         {debug, <<"Bucket list finds Bucket ~w">>},
     b0011 =>
-        {warn, <<"Call to destroy the store and so all files to be removed">>},
+        {warning, <<"Call to destroy the store and so all files to be removed">>},
     b0013 =>
-        {warn, <<"Long running task took ~w microseconds with task_type=~w">>},
+        {warning, <<"Long running task took ~w microseconds with task_type=~w">>},
     b0015 =>
         {info, <<"Put timing with sample_count=~w ink_time=~w prep_time=~w mem_time=~w with total_object_size=~w with sample_period=~w seconds">>},
     b0016 =>
@@ -71,9 +67,9 @@
     b0018 =>
         {info, <<"Positive HEAD responses timed with sample_count=~w and cache_count=~w found_count=~w fetch_ledger_time=~w fetch_ledgercache_time=~w rsp_time=~w notfound_time=~w with sample_period=~w seconds">>},
     b0019 =>
-        {warn, <<"Use of book_indexfold with constraint of Bucket ~w with no StartKey is deprecated">>},
+        {warning, <<"Use of book_indexfold with constraint of Bucket ~w with no StartKey is deprecated">>},
     b0020 =>
-        {warn, <<"Ratio of penciller cache size ~w to bookie's memory cache size ~w is larger than expected">>},
+        {warning, <<"Ratio of penciller cache size ~w to bookie's memory cache size ~w is larger than expected">>},
     r0001 =>
         {debug, <<"Object fold to process batch of ~w objects">>},
     p0001 =>
@@ -113,7 +109,7 @@
     p0029 =>
         {info, <<"L0 completion confirmed and will transition to not pending">>},
     p0030 =>
-        {warn, <<"We're doomed - intention recorded to destroy all files">>},
+        {warning, <<"We're doomed - intention recorded to destroy all files">>},
     p0031 =>
         {info, <<"Completion of update to levelzero with cache_size=~w level0_due=~w change_pending=~w MinSQN=~w MaxSQN=~w">>},
     p0032 =>
@@ -151,7 +147,7 @@
     pc012 =>
         {debug, <<"File to be created as part of MSN=~w Filename=~s IsBasement=~w">>},
     pc013 =>
-        {warn, <<"Merge resulted in empty file ~s">>},
+        {warning, <<"Merge resulted in empty file ~s">>},
     pc015 =>
         {info, <<"File created">>},
     pc016 =>
@@ -179,7 +175,7 @@
     sst04 =>
         {debug, <<"Exit called for reason ~w on filename ~s">>},
     sst05 =>
-        {warn, <<"Rename rogue filename ~s to ~s">>},
+        {warning, <<"Rename rogue filename ~s to ~s">>},
     sst06 =>
         {debug, <<"File ~s has been set for delete">>},
     sst07 =>
@@ -187,7 +183,7 @@
     sst08 =>
         {info, <<"Completed creation of ~s at level ~w with max sqn ~w">>},
     sst09 =>
-        {warn, <<"Read request exposes slot with bad CRC">>},
+        {warning, <<"Read request exposes slot with bad CRC">>},
     sst10 =>
         {debug, <<"Expansion sought to support pointer to pid ~w status ~w">>},
     sst11 =>
@@ -233,7 +229,7 @@
     i0017 =>
         {debug, <<"At SQN=~w journal has filename ~s">>},
     i0018 =>
-        {warn, <<"We're doomed - intention recorded to destroy all files">>},
+        {warning, <<"We're doomed - intention recorded to destroy all files">>},
     i0020 =>
         {info, <<"Journal backup completed to path=~s with file_count=~w">>},
     i0021 =>
@@ -245,7 +241,7 @@
     i0024 =>
         {info, <<"Prompted roll at NewSQN=~w">>},
     i0025 =>
-        {warn, <<"Journal SQN of ~w is below Ledger SQN of ~w anti-entropy will be required">>},
+        {warning, <<"Journal SQN of ~w is below Ledger SQN of ~w anti-entropy will be required">>},
     i0026 =>
         {info, <<"Deferring shutdown due to snapshot_count=~w">>},
     i0027 =>
@@ -275,9 +271,9 @@
     ic011 =>
         {info, <<"Not clearing filename ~s as modified delta is only ~w seconds">>},
     ic012 =>
-        {warn, <<"Tag ~w not found in Strategy ~w - maybe corrupted">>},
+        {warning, <<"Tag ~w not found in Strategy ~w - maybe corrupted">>},
     ic013 =>
-        {warn, "File with name ~s to be ignored in manifest as scanning for first key returned empty - maybe corrupted"},
+        {warning, "File with name ~s to be ignored in manifest as scanning for first key returned empty - maybe corrupted"},
     ic014 =>
         {info, <<"Compaction to be run with strategy ~w and max_run_length ~w">>},
     cdb01 =>
@@ -291,7 +287,7 @@
     cdb05 =>
         {info, <<"Closing of filename ~s from state ~w for reason ~w">>},
     cdb06 =>
-        {warn, <<"File to be truncated at last position of ~w with end of file at ~w">>},
+        {warning, <<"File to be truncated at last position of ~w with end of file at ~w">>},
     cdb07 =>
         {info, <<"Hashtree index computed">>},
     cdb08 =>
@@ -299,7 +295,7 @@
     cdb09 =>
         {info, <<"Failure to read Key/Value at Position ~w in scan this may be the end of the file">>},
     cdb10 =>
-        {warn, <<"CRC check failed due to error=~s">>},
+        {warning, <<"CRC check failed due to error=~s">>},
     cdb12 =>
         {info, <<"Hashtree index written">>},
     cdb13 =>
@@ -313,9 +309,9 @@
     cdb19 =>
         {info, <<"Sample timings in microseconds for sample_count=~w with totals of cycle_count=~w index_time=~w read_time=~w with sample_period=~w seconds">>},
     cdb20 =>
-        {warn, <<"Error ~w caught when safe reading a file to length ~w">>},
+        {warning, <<"Error ~w caught when safe reading a file to length ~w">>},
     cdb21 =>
-        {warn, <<"File ~s to be deleted but already gone">>}
+        {warning, <<"File ~s to be deleted but already gone">>}
 }).
@@ -375,8 +371,10 @@ get_opts() ->
         #log_options{} = LO ->
             LO;
         _ ->
-            #log_options{log_level = ?DEFAULT_LOG_LEVEL,
-                         forced_logs = []}
+            #log_options{
+                log_level = ?DEFAULT_LOG_LEVEL,
+                forced_logs = []
+            }
     end.
 
 -spec return_settings() -> {log_level(), list(string())}.
@@ -401,10 +399,14 @@ log(LogRef, Subs, SupportedLogLevels) ->
         true ->
             DBid = LogOpts#log_options.database_id,
             Prefix =
-                log_prefix(
-                    localtime_ms(), LogLevel, LogRef, DBid, self()),
+                log_prefix(LogRef, DBid, self()),
             Suffix = <<"~n">>,
-            io:format(iolist_to_binary([Prefix, Log, Suffix]), Subs);
+            ?LOG(
+                LogLevel,
+                unicode:characters_to_list([Prefix, Log, Suffix]),
+                Subs,
+                #{log_type => backend}
+            );
         false ->
             ok
     end.
@@ -437,13 +439,15 @@ log_timer(LogRef, Subs, StartTime, SupportedLevels) ->
         true ->
             DBid = LogOpts#log_options.database_id,
             Prefix =
-                log_prefix(
-                    localtime_ms(), LogLevel, LogRef, DBid, self()),
+                log_prefix(LogRef, DBid, self()),
             Suffix = <<"~n">>,
             Duration = duration_text(StartTime),
-            io:format(
-                iolist_to_binary([Prefix, Log, Duration, Suffix]),
-                Subs);
+            ?LOG(
+                LogLevel,
+                unicode:characters_to_list([Prefix, Log, Duration, Suffix]),
+                Subs,
+                #{log_type => backend}
+            );
         false ->
             ok
     end.
@@ -458,30 +462,11 @@ log_randomtimer(LogReference, Subs, StartTime, RandomProb) ->
             ok
     end.
 
-localtime_ms() ->
-    {_, _, Micro} = Now = os:timestamp(),
-    {Date, {Hours, Minutes, Seconds}} = calendar:now_to_local_time(Now),
-    {Date, {Hours, Minutes, Seconds, Micro div 1000 rem 1000}}.
-
--spec log_prefix(
-    tuple(), atom(), atom(), non_neg_integer(), pid()) -> io_lib:chars().
-log_prefix({{Y, M, D}, {H, Mi, S, Ms}}, LogLevel, LogRef, DBid, Pid) ->
-    [integer_to_list(Y), $-, i2l(M), $-, i2l(D),
-        $T, i2l(H), $:, i2l(Mi), $:, i2l(S), $., i3l(Ms),
-        " log_level=", atom_to_list(LogLevel), " log_ref=", atom_to_list(LogRef),
-        " db_id=", integer_to_list(DBid), " pid=", pid_to_list(Pid), " "].
-
--spec i2l(non_neg_integer()) -> list().
-i2l(I) when I < 10 ->
-    [$0, $0+I];
-i2l(I) ->
-    integer_to_list(I).
-
--spec i3l(non_neg_integer()) -> list().
-i3l(I) when I < 100 ->
-    [$0 | i2l(I)];
-i3l(I) ->
-    integer_to_list(I).
+-spec log_prefix(atom(), non_neg_integer(), pid()) -> io_lib:chars().
+log_prefix(LogRef, DBid, Pid) ->
+    ["log_ref=", atom_to_list(LogRef),
+        " db_id=", integer_to_list(DBid),
+        " pid=", pid_to_list(Pid), " "].
 
 -spec duration_text(erlang:timestamp()) -> io_lib:chars().
 duration_text(StartTime) ->
@@ -503,31 +488,14 @@ duration_text(StartTime) ->
 should_i_log(LogLevel, Levels, LogRef) ->
     should_i_log(LogLevel, Levels, LogRef, get_opts()).
 
-format_time({{Y, M, D}, {H, Mi, S, Ms}}) ->
-    io_lib:format("~b-~2..0b-~2..0b", [Y, M, D]) ++ "T" ++
-        io_lib:format("~2..0b:~2..0b:~2..0b.~3..0b", [H, Mi, S, Ms]).
-
-prefix_compare_test() ->
-    Time = localtime_ms(),
-    DBid = 64,
-    LogLevel = info,
-    LogRef = b0001,
-    {TS0, OldTS} =
-        timer:tc(?MODULE, format_time, [Time]),
-    {TS1, NewPrefix} =
-        timer:tc(?MODULE, log_prefix, [Time, LogLevel, LogRef, DBid, self()]),
-    {NewTS, _Rest} = lists:split(23, lists:flatten(NewPrefix)),
-    ?assertMatch(OldTS, NewTS),
-    io:format(user, "~nTimestamp timings old ~w new ~w~n", [TS0, TS1]).
-
 log_test() ->
     log(d0001, []),
     log_timer(d0001, [], os:timestamp()).
 
-log_warn_test() ->
-    ok = log(g0001, [], [warn, error]),
-    ok = log_timer(g0001, [], os:timestamp(), [warn, error]).
+log_warning_test() ->
+    ok = log(g0001, [], [warning, error]),
+    ok = log_timer(g0001, [], os:timestamp(), [warning, error]).
 
 shouldilog_test() ->
     ok = set_loglevel(debug),
@@ -547,41 +515,5 @@ badloglevel_test() ->
     ?assertMatch(true, is_active_level(?LOG_LEVELS, debug, unsupported)),
     ?assertMatch(true, is_active_level(?LOG_LEVELS, critical, unsupported)).
 
-timing_test() ->
-    % Timing test
-    % Previous LOGBASE used list with string-based keys and values
-    % The size of the LOGBASE was 19,342 words (>150KB), and logs took
-    % o(100) microseconds.
-    % Changing the LOGBASE ot a map with binary-based keys and values does not
-    % appear to improve the speed of logging, but does reduce the size of the
-    % LOGBASE to just over 2,000 words (so an order of magnitude improvement)
-    timer:sleep(10),
-    io:format(user, "Log timings:~n", []),
-    io:format(user, "Logbase size ~w~n", [erts_debug:flat_size(?LOGBASE)]),
-    io:format(
-        user,
-        "Front log timing ~p~n",
-        [timer:tc(fun() -> log(cdb21, ["test_file"]) end)]
-    ),
-    io:format(
-        user,
-        "Mid log timing ~p~n",
-        [timer:tc(fun() -> log(pc013, ["test_file"]) end)]
-    ),
-    io:format(
-        user,
-        "End log timing ~p~n",
-        [timer:tc(fun() -> log(b0003, ["testing"]) end)]
-    ),
-    io:format(
-        user,
-        "Big log timing ~p~n",
-        [timer:tc(fun() -> log(sst13, [100,100,100,100,true,1]) end)]
-    ),
-    io:format(
-        user,
-        "Timer log timing ~p~n",
-        [timer:tc(fun() -> log_timer(pc015, [], os:timestamp()) end)]
-    ).
-
 -endif.


@@ -18,12 +18,11 @@
 %% garbage is considered acceptable.
 %%
 -module(leveled_pclerk).
 -behaviour(gen_server).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     init/1,


@@ -154,7 +154,6 @@
 %% the current level zero in-memory view.
 %%
-
 -module(leveled_penciller).
 -behaviour(gen_server).


@@ -14,10 +14,9 @@
 %% access the cache)
 %% - Use a skiplist like enhanced list at lower levels.
 -module(leveled_pmanifest).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     new_manifest/0,


@@ -26,7 +26,7 @@
 -module(leveled_pmem).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     prepare_for_index/2,


@@ -16,10 +16,9 @@
 %% check that the item is available in the Journal via the Inker as part of
 %% the fold. This may be useful for anti-entropy folds
 -module(leveled_runner).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     bucket_sizestats/3,


@@ -61,7 +61,7 @@
 -behaviour(gen_statem).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -define(LOOK_SLOTSIZE, 128). % Maximum of 128
 -define(LOOK_BLOCKSIZE, {24, 32}). % 4x + y = ?LOOK_SLOTSIZE


@@ -49,7 +49,6 @@
 %% version of the segment-leaf hash from the previous level 1 hash).
 %%
-
 -module(leveled_tictac).
 -export([


@@ -10,7 +10,7 @@
 -module(leveled_tree).
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -export([
     from_orderedlist/2,


@@ -17,7 +17,6 @@
 -define(WRITE_OPS, [binary, raw, read, write]).
-
 -spec generate_uuid() -> list().
 %% @doc
 %% Generate a new globally unique ID as a string.


@@ -1,7 +1,6 @@
 -module(appdefined_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
--export([all/0]).
+-include("leveled.hrl").
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([
     application_defined_tag/1,
     bespoketag_recalc/1
@@ -12,7 +11,12 @@ all() -> [
     bespoketag_recalc
     ].
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "appdefined"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 application_defined_tag(_Config) ->
     T1 = os:timestamp(),


@@ -1,7 +1,6 @@
 -module(basic_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
--export([all/0]).
+-include("leveled.hrl").
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([simple_put_fetch_head_delete/1,
          many_put_fetch_head/1,
          journal_compaction/1,
@@ -36,6 +35,13 @@ all() -> [
     ].
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "basic"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 simple_put_fetch_head_delete(_Config) ->
     io:format("simple test with info and no forced logs~n"),
     simple_test_withlog(info, []),


@@ -1,11 +1,10 @@
 -module(iterator_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
+-include("leveled.hrl").
 -define(KEY_ONLY, {false, undefined}).
--export([all/0]).
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([expiring_indexes/1,
          breaking_folds/1,
          single_object_with2i/1,
@@ -26,6 +25,12 @@ all() -> [
     foldobjects_bybucket_range
     ].
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "iterator"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 expiring_indexes(_Config) ->
     % Add objects to the store with index entries, where the objects (and hence


@@ -1,7 +1,9 @@
 -module(perf_SUITE).
--include("../include/leveled.hrl").
+
+-include("leveled.hrl").
+
 -define(INFO, info).
--export([all/0, suite/0]).
+-export([all/0, suite/0, init_per_suite/1, end_per_suite/1]).
 -export([
     riak_ctperf/1, riak_fullperf/1, riak_profileperf/1, riak_miniperf/1
 ]).
@@ -25,6 +27,13 @@ all() -> [?performance].
 
 suite() -> [{timetrap, {hours, 16}}].
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "perf"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 riak_fullperf(_Config) ->
     riak_fullperf(2048, zstd, as_store).


@@ -1,7 +1,8 @@
 -module(recovery_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
--export([all/0]).
+-include("leveled.hrl").
+
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([
     recovery_with_samekeyupdates/1,
     same_key_rotation_withindexes/1,
@@ -38,6 +39,12 @@ all() -> [
     replace_everything
     ].
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "recovery"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 replace_everything(_Config) ->
     % See https://github.com/martinsumner/leveled/issues/389


@@ -1,7 +1,8 @@
 -module(riak_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
--export([all/0]).
+-include("leveled.hrl").
+
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([
     basic_riak/1,
     fetchclocks_modifiedbetween/1,
@@ -26,6 +27,12 @@ all() -> [
 
 -define(MAGIC, 53). % riak_kv -> riak_object
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "riak"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 basic_riak(_Config) ->
     basic_riak_tester(<<"B0">>, 640000),
@@ -65,38 +72,48 @@ basic_riak_tester(Bucket, KeyCount) ->
     CountPerList = KeyCount div 5,
 
     ObjList1 =
-        testutil:generate_objects(CountPerList,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    IndexGenFun(1),
-                                    Bucket),
+        testutil:generate_objects(
+            CountPerList,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(512),
+            IndexGenFun(1),
+            Bucket
+        ),
     ObjList2 =
-        testutil:generate_objects(CountPerList,
-                                    {fixed_binary, CountPerList + 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    IndexGenFun(2),
-                                    Bucket),
+        testutil:generate_objects(
+            CountPerList,
+            {fixed_binary, CountPerList + 1}, [],
+            leveled_rand:rand_bytes(512),
+            IndexGenFun(2),
+            Bucket
+        ),
     ObjList3 =
-        testutil:generate_objects(CountPerList,
-                                    {fixed_binary, 2 * CountPerList + 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    IndexGenFun(3),
-                                    Bucket),
+        testutil:generate_objects(
+            CountPerList,
+            {fixed_binary, 2 * CountPerList + 1}, [],
+            leveled_rand:rand_bytes(512),
+            IndexGenFun(3),
+            Bucket
+        ),
     ObjList4 =
-        testutil:generate_objects(CountPerList,
-                                    {fixed_binary, 3 * CountPerList + 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    IndexGenFun(4),
-                                    Bucket),
+        testutil:generate_objects(
+            CountPerList,
+            {fixed_binary, 3 * CountPerList + 1}, [],
+            leveled_rand:rand_bytes(512),
+            IndexGenFun(4),
+            Bucket
+        ),
     ObjList5 =
-        testutil:generate_objects(CountPerList,
-                                    {fixed_binary, 4 * CountPerList + 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    IndexGenFun(5),
-                                    Bucket),
+        testutil:generate_objects(
+            CountPerList,
+            {fixed_binary, 4 * CountPerList + 1}, [],
+            leveled_rand:rand_bytes(512),
+            IndexGenFun(5),
+            Bucket
+        ),
 
     % Mix with the ordering on the load, just in case ordering hides issues
     testutil:riakload(Bookie1, ObjList4),
@@ -124,13 +141,13 @@ basic_riak_tester(Bucket, KeyCount) ->
             ID = integer_to_list(Idx),
             Index = list_to_binary("integer" ++ ID ++ "_int"),
             {async, R} =
-                leveled_bookie:book_indexfold(Book,
-                                                {Bucket, <<>>},
-                                                {FoldKeysFun, []},
-                                                {Index,
-                                                    IC,
-                                                    IC},
-                                                {true, undefined}),
+                leveled_bookie:book_indexfold(
+                    Book,
+                    {Bucket, <<>>},
+                    {FoldKeysFun, []},
+                    {Index, IC, IC},
+                    {true, undefined}
+                ),
             KTL = R(),
             CountAcc + length(KTL)
         end
@@ -141,13 +158,13 @@ basic_riak_tester(Bucket, KeyCount) ->
             ID = integer_to_list(Idx),
             Index = list_to_binary("binary" ++ ID ++ "_bin"),
             {async, R} =
-                leveled_bookie:book_indexfold(Book,
-                                                {Bucket, <<>>},
-                                                {FoldKeysFun, []},
-                                                {Index,
-                                                    <<IC:32/integer>>,
-                                                    <<IC:32/integer>>},
-                                                {true, undefined}),
+                leveled_bookie:book_indexfold(
+                    Book,
+                    {Bucket, <<>>},
+                    {FoldKeysFun, []},
+                    {Index, <<IC:32/integer>>, <<IC:32/integer>>},
+                    {true, undefined}
+                ),
             KTL = R(),
             CountAcc + length(KTL)
         end
@@ -215,11 +232,13 @@ basic_riak_tester(Bucket, KeyCount) ->
         [Obj1, Obj2, Obj3, Obj4, Obj5, Obj2L]),
 
     {async, HeadR} =
-        leveled_bookie:book_headfold(Bookie2,
-                                        ?RIAK_TAG,
-                                        {HeadFoldFun, []},
-                                        true, false,
-                                        SegList),
+        leveled_bookie:book_headfold(
+            Bookie2,
+            ?RIAK_TAG,
+            {HeadFoldFun, []},
+            true, false,
+            SegList
+        ),
     SW_SL0 = os:timestamp(),
     KLBySeg = HeadR(),
     io:format("SegList Headfold returned ~w heads in ~w ms~n",
@@ -230,12 +249,13 @@ basic_riak_tester(Bucket, KeyCount) ->
     true = length(KLBySeg) - length(KLBySegRem) == length(BKList),
 
     {async, HeadRFalsePositive} =
-        leveled_bookie:book_headfold(Bookie2,
-                                        ?RIAK_TAG,
-                                        {HeadFoldFun, []},
-                                        true, false,
-                                        SegList ++ lists:seq(1, 256)),
-                                        % Make it a large seg list
+        leveled_bookie:book_headfold(
+            Bookie2,
+            ?RIAK_TAG,
+            {HeadFoldFun, []},
+            true, false,
+            SegList ++ lists:seq(1, 256)
+        ), % Make it a large seg list
     SW_SL1 = os:timestamp(),
     KLByXcessSeg = HeadRFalsePositive(),
     io:format("SegList Headfold with xcess segments returned ~w heads in ~w ms~n",
@@ -452,74 +472,88 @@ fetchclocks_modifiedbetween(_Config) ->
     {ok, Bookie1B} = leveled_bookie:book_start(StartOpts1B),
 
     ObjList0 =
-        testutil:generate_objects(100000,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(32),
-                                    fun() -> [] end,
-                                    <<"BaselineB">>),
+        testutil:generate_objects(
+            100000,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(32),
+            fun() -> [] end,
+            <<"BaselineB">>
+        ),
 
     ObjL1StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList1 =
-        testutil:generate_objects(20000,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B0">>),
+        testutil:generate_objects(
+            20000,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B0">>
+        ),
     timer:sleep(1000),
     ObjL1EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
 
     _ObjL2StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList2 =
-        testutil:generate_objects(15000,
-                                    {fixed_binary, 20001}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B0">>),
+        testutil:generate_objects(
+            15000,
+            {fixed_binary, 20001}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B0">>
+        ),
     timer:sleep(1000),
     _ObjList2EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
 
     ObjL3StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList3 =
-        testutil:generate_objects(35000,
-                                    {fixed_binary, 35001}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B0">>),
+        testutil:generate_objects(
+            35000,
+            {fixed_binary, 35001}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B0">>
+        ),
     timer:sleep(1000),
     ObjL3EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
 
     ObjL4StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList4 =
-        testutil:generate_objects(30000,
-                                    {fixed_binary, 70001}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B0">>),
+        testutil:generate_objects(
+            30000,
+            {fixed_binary, 70001}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B0">>
+        ),
     timer:sleep(1000),
     _ObjL4EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
 
     ObjL5StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList5 =
-        testutil:generate_objects(8000,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B1">>),
+        testutil:generate_objects(
+            8000,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B1">>
+        ),
     timer:sleep(1000),
     _ObjL5EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
 
     ObjL6StartTS = testutil:convert_to_seconds(os:timestamp()),
     ObjList6 =
-        testutil:generate_objects(7000,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(512),
-                                    fun() -> [] end,
-                                    <<"B2">>),
+        testutil:generate_objects(
+            7000,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(512),
+            fun() -> [] end,
+            <<"B2">>
+        ),
     timer:sleep(1000),
     ObjL6EndTS = testutil:convert_to_seconds(os:timestamp()),
     timer:sleep(1000),
@@ -561,16 +595,17 @@ fetchclocks_modifiedbetween(_Config) ->
         fun(FoldTarget, ModRange, EndNumber, MaxCount) ->
             fun(_I, {LKN, KC}) ->
                 {async, Runner} =
-                    leveled_bookie:book_headfold(FoldTarget,
-                                                    ?RIAK_TAG,
-                                                    KeyRangeFun(LKN + 1,
-                                                                    EndNumber),
-                                                    {StoreFoldFun, {LKN, KC}},
-                                                    false,
-                                                    true,
-                                                    false,
-                                                    ModRange,
-                                                    MaxCount),
+                    leveled_bookie:book_headfold(
+                        FoldTarget,
+                        ?RIAK_TAG,
+                        KeyRangeFun(LKN + 1, EndNumber),
+                        {StoreFoldFun, {LKN, KC}},
+                        false,
+                        true,
+                        false,
+                        ModRange,
+                        MaxCount
+                    ),
                 {_, {LKN0, KC0}} = Runner(),
                 {LKN0, KC0}
             end
@@ -640,15 +675,15 @@ fetchclocks_modifiedbetween(_Config) ->
     NoFilterStart = os:timestamp(),
     {async, R3A_NoFilterRunner} =
-        leveled_bookie:book_headfold(Bookie1A,
-                                        ?RIAK_TAG,
-                                        KeyRangeFun(1, 100000),
-                                        {CrudeStoreFoldFun(ObjL1StartTS,
-                                                            ObjL1EndTS),
-                                            {0, 0}},
-                                        false,
-                                        true,
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1A,
+            ?RIAK_TAG,
+            KeyRangeFun(1, 100000),
+            {CrudeStoreFoldFun(ObjL1StartTS, ObjL1EndTS), {0, 0}},
+            false,
+            true,
+            false
+        ),
     R3A_NoFilter = R3A_NoFilterRunner(),
     NoFilterTime = timer:now_diff(os:timestamp(), NoFilterStart) div 1000,
     io:format("R3A_NoFilter ~w~n", [R3A_NoFilter]),
@@ -668,88 +703,99 @@ fetchclocks_modifiedbetween(_Config) ->
     end,
 
     {async, R4A_MultiBucketRunner} =
-        leveled_bookie:book_headfold(Bookie1A,
-                                        ?RIAK_TAG,
-                                        {bucket_list, [<<"B0">>, <<"B2">>]},
-                                        {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                            % Range includes ObjjL5 LMDs,
-                                            % but these ar enot in bucket list
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1A,
+            ?RIAK_TAG,
+            {bucket_list, [<<"B0">>, <<"B2">>]},
+            {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+                % Range includes ObjjL5 LMDs,
+                % but these ar enot in bucket list
+            false
+        ),
     R4A_MultiBucket = R4A_MultiBucketRunner(),
     io:format("R4A_MultiBucket ~w ~n", [R4A_MultiBucket]),
     true = R4A_MultiBucket == 37000,
 
     {async, R5A_MultiBucketRunner} =
-        leveled_bookie:book_headfold(Bookie1A,
-                                        ?RIAK_TAG,
-                                        {bucket_list, [<<"B2">>, <<"B0">>]},
-                                            % Reverse the buckets in the bucket
-                                            % list
-                                        {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1A,
+            ?RIAK_TAG,
+            {bucket_list, [<<"B2">>, <<"B0">>]},
+                % Reverse the buckets in the bucket
+                % list
+            {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+            false
+        ),
     R5A_MultiBucket = R5A_MultiBucketRunner(),
     io:format("R5A_MultiBucket ~w ~n", [R5A_MultiBucket]),
     true = R5A_MultiBucket == 37000,
 
     {async, R5B_MultiBucketRunner} =
-        leveled_bookie:book_headfold(Bookie1B,
-                                        ?RIAK_TAG,
-                                        {bucket_list,
-                                            [<<"BaselineB">>, <<"B2">>, <<"B0">>]},
-                                        {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1B,
+            ?RIAK_TAG,
+            {bucket_list, [<<"BaselineB">>, <<"B2">>, <<"B0">>]},
+            {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+            false
+        ),
     R5B_MultiBucket = R5B_MultiBucketRunner(),
     io:format("R5B_MultiBucket ~w ~n", [R5B_MultiBucket]),
     true = R5B_MultiBucket == 37000,
 
     testutil:update_some_objects(Bookie1A, ObjList1, 1000),
-    R6A_PlusFilter = lists:foldl(FoldRangesFun(Bookie1A,
-                                                {ObjL1StartTS, ObjL1EndTS},
-                                                100000,
-                                                100000),
-                                    {0, 0}, lists:seq(1, 1)),
+    R6A_PlusFilter =
+        lists:foldl(
+            FoldRangesFun(
+                Bookie1A, {ObjL1StartTS, ObjL1EndTS}, 100000, 100000
+            ),
+            {0, 0},
+            lists:seq(1, 1)),
     io:format("R6A_PlusFilter ~w~n", [R6A_PlusFilter]),
     true = 19000 == element(2, R6A_PlusFilter),
 
     % Hit limit of max count before trying next bucket, with and without a
     % timestamp filter
     {async, R7A_MultiBucketRunner} =
-        leveled_bookie:book_headfold(Bookie1A,
-                                        ?RIAK_TAG,
-                                        {bucket_list, [<<"B1">>, <<"B2">>]},
-                                        {SimpleCountFun([<<"B1">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL5StartTS, ObjL6EndTS},
-                                        5000),
+        leveled_bookie:book_headfold(
+            Bookie1A,
+            ?RIAK_TAG,
+            {bucket_list, [<<"B1">>, <<"B2">>]},
+            {SimpleCountFun([<<"B1">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL5StartTS, ObjL6EndTS},
+            5000
+        ),
     R7A_MultiBucket = R7A_MultiBucketRunner(),
     io:format("R7A_MultiBucket ~w ~n", [R7A_MultiBucket]),
     true = R7A_MultiBucket == {0, 5000},
 
     {async, R8A_MultiBucketRunner} =
-        leveled_bookie:book_headfold(Bookie1A,
-                                        ?RIAK_TAG,
-                                        {bucket_list, [<<"B1">>, <<"B2">>]},
-                                        {SimpleCountFun([<<"B1">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        false,
-                                        5000),
+        leveled_bookie:book_headfold(
+            Bookie1A,
+            ?RIAK_TAG,
+            {bucket_list, [<<"B1">>, <<"B2">>]},
+            {SimpleCountFun([<<"B1">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            false,
+            5000
+        ),
     R8A_MultiBucket = R8A_MultiBucketRunner(),
     io:format("R8A_MultiBucket ~w ~n", [R8A_MultiBucket]),
     true = R8A_MultiBucket == {0, 5000},
@@ -766,11 +812,13 @@ fetchclocks_modifiedbetween(_Config) ->
     io:format("Push tested keys down levels with new objects~n"),
     ObjList7 =
-        testutil:generate_objects(200000,
-                                    {fixed_binary, 1}, [],
-                                    leveled_rand:rand_bytes(32),
-                                    fun() -> [] end,
-                                    <<"B1.9">>),
+        testutil:generate_objects(
+            200000,
+            {fixed_binary, 1}, [],
+            leveled_rand:rand_bytes(32),
+            fun() -> [] end,
+            <<"B1.9">>
+        ),
     testutil:riakload(Bookie1BS, ObjList7),
 
     lmdrange_tester(Bookie1BS, SimpleCountFun,
lmdrange_tester(Bookie1BS, SimpleCountFun, lmdrange_tester(Bookie1BS, SimpleCountFun,
ObjL4StartTS, ObjL6StartTS, ObjL6EndTS, TooLate) -> ObjL4StartTS, ObjL6StartTS, ObjL6EndTS, TooLate) ->
{async, R5B_MultiBucketRunner0} = {async, R5B_MultiBucketRunner0} =
leveled_bookie:book_headfold(Bookie1BS, leveled_bookie:book_headfold(
?RIAK_TAG, Bookie1BS,
all, ?RIAK_TAG,
{SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, all,
false, {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
true, false,
false, true,
{ObjL4StartTS, ObjL6EndTS}, false,
false), {ObjL4StartTS, ObjL6EndTS},
false
),
R5B_MultiBucket0 = R5B_MultiBucketRunner0(), R5B_MultiBucket0 = R5B_MultiBucketRunner0(),
io:format("R5B_MultiBucket0 ~w ~n", [R5B_MultiBucket0]), io:format("R5B_MultiBucket0 ~w ~n", [R5B_MultiBucket0]),
true = R5B_MultiBucket0 == 37000, true = R5B_MultiBucket0 == 37000,
{async, R5B_MultiBucketRunner1} = {async, R5B_MultiBucketRunner1} =
leveled_bookie:book_headfold(Bookie1BS, leveled_bookie:book_headfold(
?RIAK_TAG, Bookie1BS,
all, ?RIAK_TAG,
{SimpleCountFun([<<"B0">>, <<"B2">>]), 0}, all,
false, {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
true, false,
false, true,
{ObjL4StartTS, ObjL6EndTS}, false,
false), {ObjL4StartTS, ObjL6EndTS},
false
),
R5B_MultiBucket1 = R5B_MultiBucketRunner1(), R5B_MultiBucket1 = R5B_MultiBucketRunner1(),
io:format("R5B_MultiBucket1 ~w ~n", [R5B_MultiBucket1]), io:format("R5B_MultiBucket1 ~w ~n", [R5B_MultiBucket1]),
true = R5B_MultiBucket1 == 37000, true = R5B_MultiBucket1 == 37000,
@@ -829,40 +881,46 @@ lmdrange_tester(Bookie1BS, SimpleCountFun,
             end
         end,
     {async, R5B_MultiBucketRunner2} =
-        leveled_bookie:book_headfold(Bookie1BS,
-                                        ?RIAK_TAG,
-                                        {bucket_list, [<<"B0">>, <<"B2">>]},
-                                        {SimpleMinMaxFun, []},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1BS,
+            ?RIAK_TAG,
+            {bucket_list, [<<"B0">>, <<"B2">>]},
+            {SimpleMinMaxFun, []},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+            false
+        ),
     [{<<"B0">>, MinB0K, MaxB0K}, {<<"B2">>, MinB2K, MaxB2K}] =
         R5B_MultiBucketRunner2(),
     io:format("Found Min and Max Keys~n"),
     io:format("B ~s MinK ~s MaxK ~s~n", [<<"B0">>, MinB0K, MaxB0K]),
     io:format("B ~s MinK ~s MaxK ~s~n", [<<"B2">>, MinB2K, MaxB2K]),
     {async, R5B_MultiBucketRunner3a} =
-        leveled_bookie:book_headfold(Bookie1BS,
-                                        ?RIAK_TAG,
-                                        {range, <<"B0">>, {MinB0K, MaxB0K}},
-                                        {SimpleCountFun([<<"B0">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1BS,
+            ?RIAK_TAG,
+            {range, <<"B0">>, {MinB0K, MaxB0K}},
+            {SimpleCountFun([<<"B0">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+            false
+        ),
     {async, R5B_MultiBucketRunner3b} =
-        leveled_bookie:book_headfold(Bookie1BS,
-                                        ?RIAK_TAG,
-                                        {range, <<"B2">>, {MinB2K, MaxB2K}},
-                                        {SimpleCountFun([<<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL4StartTS, ObjL6EndTS},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1BS,
+            ?RIAK_TAG,
+            {range, <<"B2">>, {MinB2K, MaxB2K}},
+            {SimpleCountFun([<<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL4StartTS, ObjL6EndTS},
+            false
+        ),
     R5B_MultiBucket3a = R5B_MultiBucketRunner3a(),
     io:format("R5B_MultiBucket3a ~w ~n", [R5B_MultiBucket3a]),
     R5B_MultiBucket3b = R5B_MultiBucketRunner3b(),
@@ -871,32 +929,33 @@ lmdrange_tester(Bookie1BS, SimpleCountFun,
     io:format("Query outside of time range~n"),
     {async, R5B_MultiBucketRunner4} =
-        leveled_bookie:book_headfold(Bookie1BS,
-                                        ?RIAK_TAG,
-                                        all,
-                                        {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL6EndTS,
-                                            TooLate},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1BS,
+            ?RIAK_TAG,
+            all,
+            {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL6EndTS, TooLate},
+            false
+        ),
     R5B_MultiBucket4 = R5B_MultiBucketRunner4(),
     io:format("R5B_MultiBucket4 ~w ~n", [R5B_MultiBucket4]),
     true = R5B_MultiBucket4 == 0,
     io:format("Query with one foot inside of time range~n"),
     {async, R5B_MultiBucketRunner5} =
-        leveled_bookie:book_headfold(Bookie1BS,
-                                        ?RIAK_TAG,
-                                        all,
-                                        {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
-                                        false,
-                                        true,
-                                        false,
-                                        {ObjL6StartTS,
-                                            TooLate},
-                                        false),
+        leveled_bookie:book_headfold(
+            Bookie1BS,
+            ?RIAK_TAG,
+            all,
+            {SimpleCountFun([<<"B0">>, <<"B2">>]), 0},
+            false,
+            true,
+            false,
+            {ObjL6StartTS, TooLate},
+            false),
     R5B_MultiBucket5 = R5B_MultiBucketRunner5(),
     io:format("R5B_MultiBucket5 ~w ~n", [R5B_MultiBucket5]),
     true = R5B_MultiBucket5 == 7000.
@@ -1310,11 +1369,13 @@ handoff(_Config) ->
     % Handoff the data from the first store to the other three stores
     {async, Handoff2} =
-        leveled_bookie:book_objectfold(Bookie1,
-                                        ?RIAK_TAG,
-                                        {FoldObjectsFun(Bookie2), ok},
-                                        false,
-                                        key_order),
+        leveled_bookie:book_objectfold(
+            Bookie1,
+            ?RIAK_TAG,
+            {FoldObjectsFun(Bookie2), ok},
+            false,
+            key_order
+        ),
     SW2 = os:timestamp(),
     ok = Handoff2(),
     Time_HO2 = timer:now_diff(os:timestamp(), SW2)/1000,
@@ -1322,22 +1383,26 @@ handoff(_Config) ->
         [Time_HO2]),
     SW3 = os:timestamp(),
     {async, Handoff3} =
-        leveled_bookie:book_objectfold(Bookie1,
-                                        ?RIAK_TAG,
-                                        {FoldObjectsFun(Bookie3), ok},
-                                        true,
-                                        sqn_order),
+        leveled_bookie:book_objectfold(
+            Bookie1,
+            ?RIAK_TAG,
+            {FoldObjectsFun(Bookie3), ok},
+            true,
+            sqn_order
+        ),
     ok = Handoff3(),
     Time_HO3 = timer:now_diff(os:timestamp(), SW3)/1000,
     io:format("Handoff to Book3 in sqn_order took ~w milliseconds ~n",
         [Time_HO3]),
     SW4 = os:timestamp(),
     {async, Handoff4} =
-        leveled_bookie:book_objectfold(Bookie1,
-                                        ?RIAK_TAG,
-                                        {FoldObjectsFun(Bookie4), ok},
-                                        true,
-                                        sqn_order),
+        leveled_bookie:book_objectfold(
+            Bookie1,
+            ?RIAK_TAG,
+            {FoldObjectsFun(Bookie4), ok},
+            true,
+            sqn_order
+        ),
     ok = Handoff4(),
     Time_HO4 = timer:now_diff(os:timestamp(), SW4)/1000,


@@ -2,6 +2,8 @@
 -include("../include/leveled.hrl").
 
+-export([init_per_suite/1, end_per_suite/1]).
+
 -export([book_riakput/3,
          book_tempriakput/4,
          book_riakdelete/4,
@@ -91,6 +93,59 @@
                         updatemetadata=dict:store(clean, true, dict:new()),
                         updatevalue :: term()}).
 
+init_per_suite(Config) ->
+    LogTemplate = [time, " log_level=", level, " ", msg, "\n"],
+    LogFormatter =
+        {
+            logger_formatter,
+            #{
+                time_designator => $\s,
+                template => LogTemplate
+            }
+        },
+    {suite, SUITEName} = lists:keyfind(suite, 1, Config),
+    FileName = "leveled_" ++ SUITEName ++ "_ct.log",
+    LogConfig =
+        #{
+            config =>
+                #{
+                    file => FileName,
+                    max_no_files => 5
+                }
+        },
+    LogFilter =
+        fun(LogEvent, LogType) ->
+            Meta = maps:get(meta, LogEvent),
+            case maps:get(log_type, Meta, not_found) of
+                LogType ->
+                    LogEvent;
+                _ ->
+                    ignore
+            end
+        end,
+    ok = logger:add_handler(logfile, logger_std_h, LogConfig),
+    ok = logger:set_handler_config(logfile, formatter, LogFormatter),
+    ok = logger:set_handler_config(logfile, level, info),
+    ok = logger:add_handler_filter(logfile, type_filter, {LogFilter, backend}),
+    ok = logger:set_handler_config(default, level, notice),
+    ok = logger:set_handler_config(cth_log_redirect, level, notice),
+    ok = logger:set_primary_config(level, info),
+    Config.
+
+end_per_suite(_Config) ->
+    ok = logger:remove_handler(logfile),
+    ok = logger:set_primary_config(level, notice),
+    ok = logger:set_handler_config(default, level, all),
+    ok = logger:set_handler_config(cth_log_redirect, level, all),
+    ok.
+
 riak_object(Bucket, Key, Value, MetaData) ->
     Content = #r_content{metadata=dict:from_list(MetaData), value=Value},
     Obj = #r_object{bucket=Bucket,


@@ -1,7 +1,6 @@
 -module(tictac_SUITE).
--include_lib("common_test/include/ct.hrl").
--include("include/leveled.hrl").
--export([all/0]).
+-include("leveled.hrl").
+-export([all/0, init_per_suite/1, end_per_suite/1]).
 -export([
     many_put_compare/1,
     index_compare/1,
@@ -16,11 +15,18 @@ all() -> [
     tuplebuckets_headonly
     ].
 
+-define(LMD_FORMAT, "~4..0w~2..0w~2..0w~2..0w~2..0w").
+
 -define(V1_VERS, 1).
 -define(MAGIC, 53). % riak_kv -> riak_object
 
+init_per_suite(Config) ->
+    testutil:init_per_suite([{suite, "tictac"}|Config]),
+    Config.
+
+end_per_suite(Config) ->
+    testutil:end_per_suite(Config).
+
 many_put_compare(_Config) ->
     TreeSize = small,
     SegmentCount = 256 * 256,
     % Test requires multiple different databases, so want to mount them all