Two memory management helpers
Two helpers for memory management: 1 - a scan over the cdb file may lead to a lot of binary references being made, so force a GC after the scan. 2 - the penciller files contain slots that will be frequently read, so advise the page cache to pre-load them on startup. This is in response to unexpected memory management issues in a potentially non-conventional setup - where the Erlang VM held a lot of memory (that could be GC'd) in preference to the page cache - and consequently disk I/O and request latency were higher than expected.
This commit is contained in:
parent
b2d4d766cd
commit
f8b3101a3a
3 changed files with 57 additions and 4 deletions
|
@ -1301,6 +1301,7 @@ scan_over_file(Handle, Position, FilterFun, Output, LastKey) ->
|
||||||
end,
|
end,
|
||||||
% Bring file back to that position
|
% Bring file back to that position
|
||||||
{ok, Position} = file:position(Handle, {bof, Position}),
|
{ok, Position} = file:position(Handle, {bof, Position}),
|
||||||
|
garbage_collect(),
|
||||||
{eof, Output};
|
{eof, Output};
|
||||||
{Key, ValueAsBin, KeyLength, ValueLength} ->
|
{Key, ValueAsBin, KeyLength, ValueLength} ->
|
||||||
NewPosition = case Key of
|
NewPosition = case Key of
|
||||||
|
@ -1316,10 +1317,12 @@ scan_over_file(Handle, Position, FilterFun, Output, LastKey) ->
|
||||||
Output,
|
Output,
|
||||||
fun extract_valueandsize/1) of
|
fun extract_valueandsize/1) of
|
||||||
{stop, UpdOutput} ->
|
{stop, UpdOutput} ->
|
||||||
|
garbage_collect(),
|
||||||
{Position, UpdOutput};
|
{Position, UpdOutput};
|
||||||
{loop, UpdOutput} ->
|
{loop, UpdOutput} ->
|
||||||
case NewPosition of
|
case NewPosition of
|
||||||
eof ->
|
eof ->
|
||||||
|
garbage_collect(),
|
||||||
{eof, UpdOutput};
|
{eof, UpdOutput};
|
||||||
_ ->
|
_ ->
|
||||||
scan_over_file(Handle,
|
scan_over_file(Handle,
|
||||||
|
|
|
@ -1277,6 +1277,7 @@ open_reader(Filename) ->
|
||||||
<<FileVersion:8/integer,
|
<<FileVersion:8/integer,
|
||||||
SlotsLength:32/integer,
|
SlotsLength:32/integer,
|
||||||
SummaryLength:32/integer>> = Lengths,
|
SummaryLength:32/integer>> = Lengths,
|
||||||
|
ok = file:advise(Handle, 9, SlotsLength, will_need),
|
||||||
{ok, SummaryBin} = file:pread(Handle, SlotsLength + 9, SummaryLength),
|
{ok, SummaryBin} = file:pread(Handle, SlotsLength + 9, SummaryLength),
|
||||||
{Handle, FileVersion, SummaryBin}.
|
{Handle, FileVersion, SummaryBin}.
|
||||||
|
|
||||||
|
|
|
@ -1214,8 +1214,8 @@ dollar_bucket_index(_Config) ->
|
||||||
bigobject_memorycheck(_Config) ->
|
bigobject_memorycheck(_Config) ->
|
||||||
RootPath = testutil:reset_filestructure(),
|
RootPath = testutil:reset_filestructure(),
|
||||||
{ok, Bookie} = leveled_bookie:book_start(RootPath,
|
{ok, Bookie} = leveled_bookie:book_start(RootPath,
|
||||||
100,
|
200,
|
||||||
100000000,
|
1000000000,
|
||||||
testutil:sync_strategy()),
|
testutil:sync_strategy()),
|
||||||
Bucket = <<"B">>,
|
Bucket = <<"B">>,
|
||||||
IndexGen = fun() -> [] end,
|
IndexGen = fun() -> [] end,
|
||||||
|
@ -1227,7 +1227,7 @@ bigobject_memorycheck(_Config) ->
|
||||||
{Obj, Spc} = testutil:set_object(Bucket, Key, Value, IndexGen, []),
|
{Obj, Spc} = testutil:set_object(Bucket, Key, Value, IndexGen, []),
|
||||||
testutil:book_riakput(Bookie, Obj, Spc)
|
testutil:book_riakput(Bookie, Obj, Spc)
|
||||||
end,
|
end,
|
||||||
lists:foreach(ObjPutFun, lists:seq(1, 600)),
|
lists:foreach(ObjPutFun, lists:seq(1, 700)),
|
||||||
{ok, _Ink, Pcl} = leveled_bookie:book_returnactors(Bookie),
|
{ok, _Ink, Pcl} = leveled_bookie:book_returnactors(Bookie),
|
||||||
{binary, BL} = process_info(Pcl, binary),
|
{binary, BL} = process_info(Pcl, binary),
|
||||||
{memory, M0} = process_info(Pcl, memory),
|
{memory, M0} = process_info(Pcl, memory),
|
||||||
|
@ -1235,5 +1235,54 @@ bigobject_memorycheck(_Config) ->
|
||||||
io:format("Pcl binary memory ~w ~w memory ~w~n", [B0, length(BL), M0]),
|
io:format("Pcl binary memory ~w ~w memory ~w~n", [B0, length(BL), M0]),
|
||||||
true = B0 < 500 * 4000,
|
true = B0 < 500 * 4000,
|
||||||
true = M0 < 500 * 4000,
|
true = M0 < 500 * 4000,
|
||||||
|
% All processes
|
||||||
|
{_TotalCDBBinMem, _TotalCDBProcesses} = cdb_memory_check(),
|
||||||
ok = leveled_bookie:book_close(Bookie),
|
ok = leveled_bookie:book_close(Bookie),
|
||||||
testutil:reset_filestructure().
|
{ok, BookieR} = leveled_bookie:book_start(RootPath,
|
||||||
|
2000,
|
||||||
|
1000000000,
|
||||||
|
testutil:sync_strategy()),
|
||||||
|
{RS_TotalCDBBinMem, _RS_TotalCDBProcesses} = cdb_memory_check(),
|
||||||
|
true = RS_TotalCDBBinMem < 1024 * 1024,
|
||||||
|
% No binary object references exist after startup
|
||||||
|
ok = leveled_bookie:book_close(BookieR),
|
||||||
|
testutil:reset_filestructure().
|
||||||
|
|
||||||
|
|
||||||
|
%% @doc Test helper: find all leveled_cdb processes on the node and sum
%% the off-heap binary memory referenced by each.
%% Returns {TotalBinaryBytes, CDBPids}.
%% Used to confirm that CDB file scans do not leave large binary
%% references behind after garbage collection.
cdb_memory_check() ->
    % Identify CDB processes by the '$initial_call' entry in their
    % process dictionary.  process_info/2 returns undefined when a
    % process has exited between processes/0 and this call - treat
    % those as non-CDB rather than crashing on a badmatch.
    TotalCDBProcesses =
        lists:filter(
            fun(P) ->
                case process_info(P, dictionary) of
                    {dictionary, PD} ->
                        case lists:keyfind('$initial_call', 1, PD) of
                            {'$initial_call', {leveled_cdb, init, 1}} ->
                                true;
                            _ ->
                                false
                        end;
                    undefined ->
                        % Process died while we were scanning
                        false
                end
            end,
            processes()),
    % Sum the referenced binary memory across the CDB processes,
    % logging the per-pid figure to aid diagnosis of any failure.
    TotalCDBBinMem =
        lists:foldl(
            fun(P, Acc) ->
                BinMem = calc_total_binary_memory(P),
                io:format("Memory for pid ~w is ~w~n", [P, BinMem]),
                BinMem + Acc
            end,
            0,
            TotalCDBProcesses),
    io:format("Total binary memory ~w in ~w CDB processes~n",
                [TotalCDBBinMem, length(TotalCDBProcesses)]),
    {TotalCDBBinMem, TotalCDBProcesses}.
|
||||||
|
|
||||||
|
%% @doc Test helper: total bytes of off-heap binary data referenced by
%% Pid.  When the total exceeds 1MB, also logs the individual
%% references larger than 1KB to aid diagnosis.
%% Returns 0 if the process has already exited (process_info/2 yields
%% undefined in that case - previously this crashed with a badmatch).
calc_total_binary_memory(Pid) ->
    case process_info(Pid, binary) of
        {binary, BL} ->
            % Each element of BL is {BinaryRef, Size, RefCount}
            TBM = lists:foldl(fun({_R, Sz, _C}, Acc) -> Acc + Sz end, 0, BL),
            case TBM > 1000000 of
                true ->
                    % Dump the big (>1KB) references so a failing
                    % memory check shows what is being held
                    FilteredBL =
                        lists:filter(fun(BMD) -> element(2, BMD) > 1024 end, BL),
                    io:format("Big-ref details for ~w ~w~n", [Pid, FilteredBL]);
                false ->
                    ok
            end,
            TBM;
        undefined ->
            % Process exited between discovery and inspection
            0
    end.
|
Loading…
Add table
Add a link
Reference in a new issue