Log rationalisation and format improvements

This commit is contained in:
Martin Sumner 2019-02-13 13:06:28 +00:00
parent baa8353db7
commit 31badfd601
4 changed files with 60 additions and 41 deletions

View file

@@ -107,7 +107,8 @@
-define(CACHE_SIZE_JITTER, 25). -define(CACHE_SIZE_JITTER, 25).
-define(JOURNAL_SIZE_JITTER, 20). -define(JOURNAL_SIZE_JITTER, 20).
-define(ABSOLUTEMAX_JOURNALSIZE, 4000000000). -define(ABSOLUTEMAX_JOURNALSIZE, 4000000000).
-define(LONG_RUNNING, 80000). -define(LONG_RUNNING, 200000).
% An individual task taking > 200ms gets a specific log
-define(COMPRESSION_METHOD, lz4). -define(COMPRESSION_METHOD, lz4).
-define(COMPRESSION_POINT, on_receipt). -define(COMPRESSION_POINT, on_receipt).
-define(LOG_LEVEL, info). -define(LOG_LEVEL, info).

View file

@@ -894,7 +894,12 @@ open_active_file(FileName) when is_list(FileName) ->
{ok, LastPosition} -> {ok, LastPosition} ->
ok = file:close(Handle); ok = file:close(Handle);
{ok, EndPosition} -> {ok, EndPosition} ->
leveled_log:log("CDB06", [LastPosition, EndPosition]), case {LastPosition, EndPosition} of
{?BASE_POSITION, 0} ->
ok;
_ ->
leveled_log:log("CDB06", [LastPosition, EndPosition])
end,
{ok, _LastPosition} = file:position(Handle, LastPosition), {ok, _LastPosition} = file:position(Handle, LastPosition),
ok = file:truncate(Handle), ok = file:truncate(Handle),
ok = file:close(Handle) ok = file:close(Handle)
@@ -1277,7 +1282,13 @@ startup_filter(Key, _ValueAsBin, Position, {Hashtree, _LastKey}, _ExtractFun) ->
scan_over_file(Handle, Position, FilterFun, Output, LastKey) -> scan_over_file(Handle, Position, FilterFun, Output, LastKey) ->
case saferead_keyvalue(Handle) of case saferead_keyvalue(Handle) of
false -> false ->
leveled_log:log("CDB09", [Position]), case {LastKey, Position} of
{empty, ?BASE_POSITION} ->
% Not interesting that we've nothing to read at base
ok;
_ ->
leveled_log:log("CDB09", [Position])
end,
% Bring file back to that position % Bring file back to that position
{ok, Position} = file:position(Handle, {bof, Position}), {ok, Position} = file:position(Handle, {bof, Position}),
{eof, Output}; {eof, Output};
@@ -1896,7 +1907,7 @@ dump(FileName) ->
end, end,
NumberOfPairs = lists:foldl(Fn, 0, lists:seq(0,255)) bsr 1, NumberOfPairs = lists:foldl(Fn, 0, lists:seq(0,255)) bsr 1,
io:format("Count of keys in db is ~w~n", [NumberOfPairs]), io:format("Count of keys in db is ~w~n", [NumberOfPairs]),
{ok, _} = file:position(Handle, {bof, 2048}), {ok, _} = file:position(Handle, {bof, ?BASE_POSITION}),
Fn1 = fun(_I, Acc) -> Fn1 = fun(_I, Acc) ->
{KL, VL} = read_next_2_integers(Handle), {KL, VL} = read_next_2_integers(Handle),
{Key, KB} = safe_read_next_keybin(Handle, KL), {Key, KB} = safe_read_next_keybin(Handle, KL),

View file

@@ -60,7 +60,7 @@
{"B0011", {"B0011",
{warn, "Call to destroy the store and so all files to be removed"}}, {warn, "Call to destroy the store and so all files to be removed"}},
{"B0013", {"B0013",
{warn, "Long running task took ~w microseconds with task of type ~w"}}, {warn, "Long running task took ~w microseconds with task_type=~w"}},
{"B0015", {"B0015",
{info, "Put timing with sample_count=~w and mem_time=~w ink_time=~w" {info, "Put timing with sample_count=~w and mem_time=~w ink_time=~w"
++ " with total_object_size=~w"}}, ++ " with total_object_size=~w"}},
@@ -142,7 +142,8 @@
{warn, "We're doomed - intention recorded to destroy all files"}}, {warn, "We're doomed - intention recorded to destroy all files"}},
{"P0031", {"P0031",
{info, "Completion of update to levelzero" {info, "Completion of update to levelzero"
++ " with cache_size=~w status=~w and update_success=~w"}}, ++ " with cache_size=~w level0_due=~w"
++ " and change_pending=~w"}},
{"P0032", {"P0032",
{info, "Fetch head timing with sample_count=~w and level timings of" {info, "Fetch head timing with sample_count=~w and level timings of"
++ " foundmem_time=~w found0_time=~w found1_time=~w" ++ " foundmem_time=~w found0_time=~w found1_time=~w"
@@ -207,7 +208,7 @@
{"PC015", {"PC015",
{info, "File created"}}, {info, "File created"}},
{"PC016", {"PC016",
{info, "Slow fetch from SFT ~w of ~w microseconds at level ~w " {info, "Slow fetch from SFT ~w of ~w us at level ~w "
++ "with result ~w"}}, ++ "with result ~w"}},
{"PC017", {"PC017",
{info, "Notified clerk of manifest change"}}, {info, "Notified clerk of manifest change"}},
@@ -222,18 +223,18 @@
{"PC022", {"PC022",
{info, "Storing reference to deletions at ManifestSQN=~w"}}, {info, "Storing reference to deletions at ManifestSQN=~w"}},
{"PM002", {"PM002",
{info, "Completed dump of L0 cache to list of size ~w"}}, {info, "Completed dump of L0 cache to list of l0cache_size=~w"}},
{"SST01", {"SST01",
{info, "SST timing for result ~w is sample ~w total ~w and max ~w"}}, {info, "SST timing for result=~w is sample=~w total=~w and max=~w"}},
{"SST02", {"SST02",
{error, "False result returned from SST with filename ~s as " {error, "False result returned from SST with filename ~s as "
++ "slot ~w has failed crc check"}}, ++ "slot ~w has failed crc check"}},
{"SST03", {"SST03",
{info, "Opening SST file with filename ~s slots ~w and" {info, "Opening SST file with filename ~s slot_count=~w and"
++ " max sqn ~w"}}, ++ " max sqn ~w"}},
{"SST04", {"SST04",
{info, "Exit called for reason ~w on filename ~s"}}, {debug, "Exit called for reason ~w on filename ~s"}},
{"SST05", {"SST05",
{warn, "Rename rogue filename ~s to ~s"}}, {warn, "Rename rogue filename ~s to ~s"}},
{"SST06", {"SST06",
@@ -359,14 +360,15 @@
{"CDB05", {"CDB05",
{info, "Closing of filename ~s from state ~w for reason ~w"}}, {info, "Closing of filename ~s from state ~w for reason ~w"}},
{"CDB06", {"CDB06",
{info, "File to be truncated at last position of ~w with end of " {warn, "File to be truncated at last position of ~w with end of "
++ "file at ~w"}}, ++ "file at ~w"}},
{"CDB07", {"CDB07",
{info, "Hashtree computed"}}, {info, "Hashtree computed"}},
{"CDB08", {"CDB08",
{info, "Renaming file from ~s to ~s for which existence is ~w"}}, {info, "Renaming file from ~s to ~s for which existence is ~w"}},
{"CDB09", {"CDB09",
{info, "Failure to read Key/Value at Position ~w in scan"}}, {info, "Failure to read Key/Value at Position ~w in scan " ++
"this may be the end of the file"}},
{"CDB10", {"CDB10",
{info, "CRC check failed due to mismatch"}}, {info, "CRC check failed due to mismatch"}},
{"CDB11", {"CDB11",
@@ -374,17 +376,17 @@
{"CDB12", {"CDB12",
{info, "HashTree written"}}, {info, "HashTree written"}},
{"CDB13", {"CDB13",
{info, "Write options of ~w"}}, {debug, "Write options of ~w"}},
{"CDB14", {"CDB14",
{info, "Microsecond timings for hashtree build of " {info, "Microsecond timings for hashtree build of "
++ "to_list ~w sort ~w build ~w"}}, ++ "to_list=~w sort=~w build=~w"}},
{"CDB15", {"CDB15",
{info, "Collision in search for hash ~w"}}, {info, "Collision in search for hash ~w"}},
{"CDB16", {"CDB16",
{info, "CDB scan from start ~w in file with end ~w and last_key ~w"}}, {info, "CDB scan from start ~w in file with end ~w and last_key ~w"}},
{"CDB17", {"CDB17",
{info, "After ~w PUTs total write time is ~w total sync time is ~w " {info, "After ~w PUTs total_write_time=~w total_sync_time=~w "
++ "and max write time is ~w and max sync time is ~w"}}, ++ "and max_write_time=~w and max_sync_time=~w"}},
{"CDB18", {"CDB18",
{info, "Handled return and write of hashtable"}}, {info, "Handled return and write of hashtable"}},
{"CDB19", {"CDB19",
@@ -471,9 +473,10 @@ log(LogRef, Subs, SupportedLogLevels) ->
{LogRef, {LogLevel, LogText}} -> {LogRef, {LogLevel, LogText}} ->
case should_i_log(LogLevel, SupportedLogLevels, LogRef) of case should_i_log(LogLevel, SupportedLogLevels, LogRef) of
true -> true ->
io:format(format_time() io:format(format_time() ++ " "
++ " " ++ LogRef ++ " ~w " ++ atom_to_list(LogLevel) ++ " "
++ LogText ++ "~n", ++ LogRef ++ " ~w "
++ LogText ++ "~n",
[self()|Subs]); [self()|Subs]);
false -> false ->
ok ok
@@ -507,18 +510,21 @@ log_timer(LogRef, Subs, StartTime, SupportedLogLevels) ->
{LogRef, {LogLevel, LogText}} -> {LogRef, {LogLevel, LogText}} ->
case should_i_log(LogLevel, SupportedLogLevels, LogRef) of case should_i_log(LogLevel, SupportedLogLevels, LogRef) of
true -> true ->
MicroS = timer:now_diff(os:timestamp(), StartTime), DurationText =
{Unit, Time} = case MicroS of case timer:now_diff(os:timestamp(), StartTime) of
MicroS when MicroS < 1000 -> US when US > 1000 ->
{"microsec", MicroS}; " with us_duration=" ++ integer_to_list(US) ++
MicroS -> " or ms_duration="
{"ms", MicroS div 1000} ++ integer_to_list(US div 1000);
end, US ->
io:format(format_time() " with us_duration=" ++ integer_to_list(US)
++ " " ++ LogRef ++ " ~w " end,
++ LogText io:format(format_time() ++ " "
++ " with time taken ~w " ++ Unit ++ "~n", ++ atom_to_list(LogLevel) ++ " "
[self()|Subs] ++ [Time]); ++ LogRef ++ " ~w "
++ LogText
++ DurationText ++ "~n",
[self()|Subs]);
false -> false ->
ok ok
end; end;

View file

@@ -234,7 +234,7 @@
-define(PROMPT_WAIT_ONL0, 5). -define(PROMPT_WAIT_ONL0, 5).
-define(WORKQUEUE_BACKLOG_TOLERANCE, 4). -define(WORKQUEUE_BACKLOG_TOLERANCE, 4).
-define(COIN_SIDECOUNT, 5). -define(COIN_SIDECOUNT, 5).
-define(SLOW_FETCH, 20000). -define(SLOW_FETCH, 100000).
-define(ITERATOR_SCANWIDTH, 4). -define(ITERATOR_SCANWIDTH, 4).
-define(TIMING_SAMPLECOUNTDOWN, 10000). -define(TIMING_SAMPLECOUNTDOWN, 10000).
-define(TIMING_SAMPLESIZE, 100). -define(TIMING_SAMPLESIZE, 100).
@@ -693,7 +693,7 @@ handle_call({push_mem, {LedgerTable, PushedIdx, MinSQN, MaxSQN}},
State#state{levelzero_pending=L0Pend, State#state{levelzero_pending=L0Pend,
levelzero_constructor=L0Constructor}}; levelzero_constructor=L0Constructor}};
{false, false} -> {false, false} ->
leveled_log:log("P0018", [ok, false, false]), % leveled_log:log("P0018", [ok, false, false]),
PushedTree = PushedTree =
case is_tuple(LedgerTable) of case is_tuple(LedgerTable) of
true -> true ->
@@ -1254,7 +1254,7 @@ archive_files(RootPath, UsedFileList) ->
-> pcl_state(). -> pcl_state().
%% @doc %% @doc
%% Update the in-memory cache of recent changes for the penciller. This is %% Update the in-memory cache of recent changes for the penciller. This is
%% the level zer at the top of the tree. %% the level zero at the top of the tree.
%% Once the update is made, there needs to be a decision to potentially roll %% Once the update is made, there needs to be a decision to potentially roll
%% the level-zero memory to an on-disk level zero sst file. This can only %% the level-zero memory to an on-disk level zero sst file. This can only
%% happen when the cache has exeeded the size threshold (with some jitter %% happen when the cache has exeeded the size threshold (with some jitter
@@ -1295,17 +1295,18 @@ update_levelzero(L0Size, {PushedTree, PushedIdx, MinSQN, MaxSQN},
end, end,
NoPendingManifestChange = not State#state.work_ongoing, NoPendingManifestChange = not State#state.work_ongoing,
JitterCheck = RandomFactor or CacheMuchTooBig, JitterCheck = RandomFactor or CacheMuchTooBig,
case {CacheTooBig, JitterCheck, NoPendingManifestChange} of Due = CacheTooBig and JitterCheck,
{true, true, true} -> case {Due, NoPendingManifestChange} of
{true, true} ->
{L0Pend, L0Constructor, none} = {L0Pend, L0Constructor, none} =
maybe_roll_memory(UpdState, false), maybe_roll_memory(UpdState, false),
leveled_log:log_timer("P0031", [true, true, L0Pend], SW), LogSubs = [NewL0Size, true, true],
leveled_log:log_timer("P0031", LogSubs, SW),
UpdState#state{levelzero_pending=L0Pend, UpdState#state{levelzero_pending=L0Pend,
levelzero_constructor=L0Constructor}; levelzero_constructor=L0Constructor};
_ -> _ ->
leveled_log:log_timer("P0031", LogSubs = [NewL0Size, Due, NoPendingManifestChange],
[CacheTooBig, JitterCheck, false], leveled_log:log_timer("P0031", LogSubs, SW),
SW),
UpdState UpdState
end end
end. end.