store log opts in pdict + inherit at proc start
parent fbe200f1ca
commit d30ca16a17
6 changed files with 65 additions and 19 deletions
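The change follows one pattern throughout: when a parent process spawns a worker it captures the current log options with leveled_log:get_opts() and passes them as the first init argument, and the worker stores them in its own process dictionary with leveled_log:save/1. A minimal sketch of that pattern, using a hypothetical example_worker module that is not part of this commit (only leveled_log:save/1,2 and get_opts/0 come from the diff below):

%% Hypothetical example_worker illustrating the parent/child hand-off.
-module(example_worker).
-behaviour(gen_server).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2]).

start_link(WorkerOpts) ->
    %% Parent side: read the parent's saved log options (or the app-env
    %% defaults) and pass them to the child as the first init argument.
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), WorkerOpts], []).

init([LogOpts, WorkerOpts]) ->
    %% Child side: store the inherited options in this process's
    %% dictionary so later leveled_log:log/2 calls see the same settings.
    leveled_log:save(LogOpts),
    {ok, WorkerOpts}.

handle_call(_Msg, _From, State) -> {reply, ok, State}.
handle_cast(_Msg, State) -> {noreply, State}.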
@@ -1061,9 +1061,8 @@ init([Opts]) ->
    {InkerOpts, PencillerOpts} = set_options(Opts),
    LogLevel = proplists:get_value(log_level, Opts),
    ok = application:set_env(leveled, log_level, LogLevel),
    ForcedLogs = proplists:get_value(forced_logs, Opts),
    ok = application:set_env(leveled, forced_logs, ForcedLogs),
    leveled_log:save(LogLevel, ForcedLogs),

    ConfiguredCacheSize =
        max(proplists:get_value(cache_size, Opts), ?MIN_CACHE_SIZE),

@@ -140,7 +140,7 @@
%% @doc
%% Generate a new clerk
clerk_new(InkerClerkOpts) ->
    gen_server:start_link(?MODULE, [InkerClerkOpts], []).
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), InkerClerkOpts], []).

-spec clerk_compact(pid(), pid(),
                    fun(), fun(), fun(),
@@ -170,7 +170,8 @@ clerk_trim(Pid, Inker, PersistedSQN) ->
%% of the hastable in the CDB file - so that the file is not blocked during
%% this calculation
clerk_hashtablecalc(HashTree, StartPos, CDBpid) ->
    {ok, Clerk} = gen_server:start_link(?MODULE, [#iclerk_options{}], []),
    {ok, Clerk} = gen_server:start_link(?MODULE, [leveled_log:get_opts(),
                                                    #iclerk_options{}], []),
    gen_server:cast(Clerk, {hashtable_calc, HashTree, StartPos, CDBpid}).

-spec clerk_stop(pid()) -> ok.
@@ -183,7 +184,8 @@ clerk_stop(Pid) ->
%%% gen_server callbacks
%%%============================================================================

init([IClerkOpts]) ->
init([LogOpts, IClerkOpts]) ->
    leveled_log:save(LogOpts),
    ReloadStrategy = IClerkOpts#iclerk_options.reload_strategy,
    CDBopts = IClerkOpts#iclerk_options.cdb_options,
    WP = CDBopts#cdb_options.waste_path,

@@ -174,13 +174,13 @@
%% The inker will need to know what the reload strategy is, to inform the
%% clerk about the rules to enforce during compaction.
ink_start(InkerOpts) ->
    gen_server:start_link(?MODULE, [InkerOpts], []).
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), InkerOpts], []).

-spec ink_snapstart(inker_options()) -> {ok, pid()}.
%% @doc
%% Don't link on startup as snapshot
ink_snapstart(InkerOpts) ->
    gen_server:start(?MODULE, [InkerOpts], []).
    gen_server:start(?MODULE, [leveled_log:get_opts(), InkerOpts], []).

-spec ink_put(pid(),
                leveled_codec:ledger_key(),
@@ -450,7 +450,8 @@ ink_checksqn(Pid, LedgerSQN) ->
%%% gen_server callbacks
%%%============================================================================

init([InkerOpts]) ->
init([LogOpts, InkerOpts]) ->
    leveled_log:save(LogOpts),
    leveled_rand:seed(),
    case {InkerOpts#inker_options.root_path,
            InkerOpts#inker_options.start_snapshot} of

@@ -9,9 +9,14 @@

-export([log/2,
            log_timer/3,
            log_randomtimer/4]).
            log_randomtimer/4]).

-define(LOG_LEVELS, [debug, info, warn, error, critical]).
-export([save/1, save/2,
            get_opts/0]).

-type log_level() :: debug | info | warn | error | critical.
-type log_levels() :: [log_level()].
-define(LOG_LEVELS, [debug, info, warn, error, critical]).

-define(DEFAULT_LOG_LEVEL, error).

@@ -375,6 +380,28 @@
        {warn, "Error ~w caught when safe reading a file to length ~w"}}
    ]).

-record(log_options,
        {log_level = info :: leveled_log:log_levels(),
            forced_logs = [] :: [string()]}).

save(LogLevel, ForcedLogs) when is_list(ForcedLogs), is_atom(LogLevel) ->
    save(#log_options{log_level = LogLevel,
                        forced_logs = ForcedLogs}).

save(#log_options{} = LO) ->
    put('$leveled_log_options', LO),
    ok.

get_opts() ->
    case get('$leveled_log_options') of
        undefined ->
            LogLevel = application:get_env(leveled, log_level, ?DEFAULT_LOG_LEVEL),
            ForcedLogs = application:get_env(leveled, forced_logs, []),
            #log_options{log_level = LogLevel,
                            forced_logs = ForcedLogs};
        #log_options{} = LO ->
            LO
    end.

log(LogReference, Subs) ->
    log(LogReference, Subs, ?LOG_LEVELS).
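The '$leveled_log_options' process dictionary key above is the per-process source of truth: get_opts/0 only falls back to the leveled application environment (and ultimately ?DEFAULT_LOG_LEVEL, i.e. error) when nothing has been saved. A small hypothetical illustration of that fallback; fallback_demo/0 and the "B0001" reference are invented for the example:

%% Hypothetical demo of the pdict-first, app-env-second lookup order.
fallback_demo() ->
    erlang:erase('$leveled_log_options'),      % ensure nothing is saved yet
    OptsFromEnv = leveled_log:get_opts(),      % built from application env
    ok = leveled_log:save(debug, ["B0001"]),   % store in this process only
    OptsFromPdict = leveled_log:get_opts(),    % now read from the pdict
    {OptsFromEnv, OptsFromPdict}.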
@@ -396,15 +423,15 @@ log(LogRef, Subs, SupportedLogLevels) ->
    end.

should_i_log(LogLevel, Levels, LogRef) ->
    ForcedLogs = application:get_env(leveled, forced_logs, []),
    #log_options{log_level = CurLevel, forced_logs = ForcedLogs} = get_opts(),
    case lists:member(LogRef, ForcedLogs) of
        true ->
            true;
        false ->
            case application:get_env(leveled, log_level, ?DEFAULT_LOG_LEVEL) of
                LogLevel ->
            if CurLevel == LogLevel ->
                    true;
                CurLevel ->
                    true;
                true ->
                    is_active_level(Levels, CurLevel, LogLevel)
            end
    end.

@@ -499,4 +526,18 @@ shouldilog_test() ->
    ok = application:set_env(leveled, log_level, info),
    ?assertMatch(false, should_i_log(debug, ?LOG_LEVELS, "D0001")).

shouldilog2_test() ->
    ok = save(unsupported, []),
    ?assertMatch(false, should_i_log(info, ?LOG_LEVELS, "G0001")),
    ?assertMatch(false, should_i_log(inform, ?LOG_LEVELS, "G0001")),
    ok = save(debug, []),
    ?assertMatch(true, should_i_log(info, ?LOG_LEVELS, "G0001")),
    ok = save(info, []),
    ?assertMatch(true, should_i_log(info, ?LOG_LEVELS, "G0001")),
    ok = save(error, ["G0001"]),
    ?assertMatch(true, should_i_log(info, ?LOG_LEVELS, "G0001")),
    ?assertMatch(false, should_i_log(info, ?LOG_LEVELS, "G0002")),
    ok = save(info, []),
    ?assertMatch(false, should_i_log(debug, ?LOG_LEVELS, "D0001")).

-endif.

@@ -60,7 +60,8 @@
clerk_new(Owner, Manifest, CompressionMethod) ->
    {ok, Pid} =
        gen_server:start_link(?MODULE,
                                [{compression_method, CompressionMethod}],
                                [leveled_log:get_opts(),
                                    {compression_method, CompressionMethod}],
                                []),
    ok = gen_server:call(Pid, {load, Owner, Manifest}, infinity),
    leveled_log:log("PC001", [Pid, Owner]),
@@ -82,7 +83,8 @@ clerk_close(Pid) ->
%%% gen_server callbacks
%%%============================================================================

init([{compression_method, CompressionMethod}]) ->
init([LogOpts, {compression_method, CompressionMethod}]) ->
    leveled_log:save(LogOpts),
    {ok, #state{compression_method = CompressionMethod}}.

handle_call({load, Owner, RootPath}, _From, State) ->

@@ -317,13 +317,13 @@
%% query is run against the level zero space and just the query results are
%5 copied into the clone.
pcl_start(PCLopts) ->
    gen_server:start_link(?MODULE, [PCLopts], []).
    gen_server:start_link(?MODULE, [leveled_log:get_opts(), PCLopts], []).

-spec pcl_snapstart(penciller_options()) -> {ok, pid()}.
%% @doc
%% Don't link to the bookie - this is a snpashot
pcl_snapstart(PCLopts) ->
    gen_server:start(?MODULE, [PCLopts], []).
    gen_server:start(?MODULE, [leveled_log:get_opts(), PCLopts], []).

-spec pcl_pushmem(pid(), bookies_memory()) -> ok|returned.
%% @doc
@@ -600,7 +600,8 @@ pcl_checkforwork(Pid) ->
%%% gen_server callbacks
%%%============================================================================

init([PCLopts]) ->
init([LogOpts, PCLopts]) ->
    leveled_log:save(LogOpts),
    leveled_rand:seed(),
    case {PCLopts#penciller_options.root_path,
            PCLopts#penciller_options.start_snapshot,
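Taken together, the log_level and forced_logs options read in the bookie init (first hunk) now reach every inker, penciller and clerk that the bookie starts. A hedged usage sketch; start_verbose_store/1 is a hypothetical helper and the "B0015" forced log reference is an arbitrary example value:

%% Hypothetical caller: the log options passed here are saved by the
%% bookie's init and then inherited, via leveled_log:get_opts/0, by each
%% process the bookie starts.
start_verbose_store(RootPath) ->
    leveled_bookie:book_start([{root_path, RootPath},
                                {log_level, info},
                                {forced_logs, ["B0015"]}]).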