Memory management

Extracting a sub-binary from within a binary leaves a reference to the whole of the original binary.

If there are a lot of very large objects received back-to-back, this can explode the amount of memory the penciller appears to hold (and garbage collection cannot resolve this).

To dereference from the larger binary, a binary copy (binary:copy/1) of the extracted sub-binary must be made.
This commit is contained in:
Martin Sumner 2019-06-15 17:23:06 +01:00
parent c6a873bc15
commit 952f088873
3 changed files with 45 additions and 4 deletions

View file

@ -99,6 +99,10 @@
fetch_value/2, fetch_value/2,
journal_notfound/4]). journal_notfound/4]).
-ifdef(TEST).
-export([book_returnactors/1]).
-endif.
-include_lib("eunit/include/eunit.hrl"). -include_lib("eunit/include/eunit.hrl").
-define(CACHE_SIZE, 2500). -define(CACHE_SIZE, 2500).
@ -1114,6 +1118,11 @@ book_addlogs(Pid, ForcedLogs) ->
book_removelogs(Pid, ForcedLogs) -> book_removelogs(Pid, ForcedLogs) ->
gen_server:cast(Pid, {remove_logs, ForcedLogs}). gen_server:cast(Pid, {remove_logs, ForcedLogs}).
%% @doc
%% Return the Inker and Penciller - {ok, Inker, Penciller}.  Used only in
%% tests (the export is wrapped in -ifdef(TEST)) so that test code can
%% inspect the actor processes directly, e.g. via process_info/2.
book_returnactors(Pid) ->
    gen_server:call(Pid, return_actors).
%%%============================================================================ %%%============================================================================
%%% gen_server callbacks %%% gen_server callbacks
@ -1444,6 +1453,8 @@ handle_call(destroy, _From, State=#state{is_snapshot=Snp}) when Snp == false ->
lists:foreach(fun(DirPath) -> delete_path(DirPath) end, InkPathList), lists:foreach(fun(DirPath) -> delete_path(DirPath) end, InkPathList),
lists:foreach(fun(DirPath) -> delete_path(DirPath) end, PCLPathList), lists:foreach(fun(DirPath) -> delete_path(DirPath) end, PCLPathList),
{stop, normal, ok, State}; {stop, normal, ok, State};
handle_call(return_actors, _From, State) ->
{reply, {ok, State#state.inker, State#state.penciller}, State};
handle_call(Msg, _From, State) -> handle_call(Msg, _From, State) ->
{reply, {unsupported_message, element(1, Msg)}, State}. {reply, {unsupported_message, element(1, Msg)}, State}.
@ -2680,6 +2691,7 @@ folder_cache_test(CacheSize) ->
{ok, Bookie1} = book_start([{root_path, RootPath}, {ok, Bookie1} = book_start([{root_path, RootPath},
{max_journalsize, 1000000}, {max_journalsize, 1000000},
{cache_size, CacheSize}]), {cache_size, CacheSize}]),
_ = book_returnactors(Bookie1),
ObjL1 = generate_multiple_objects(400, 1), ObjL1 = generate_multiple_objects(400, 1),
ObjL2 = generate_multiple_objects(400, 1), ObjL2 = generate_multiple_objects(400, 1),
% Put in all the objects with a TTL in the future % Put in all the objects with a TTL in the future

View file

@ -263,8 +263,8 @@ riak_extract_metadata(delete, Size) ->
{{delete, null, null, Size}, []}; {{delete, null, null, Size}, []};
riak_extract_metadata(ObjBin, Size) -> riak_extract_metadata(ObjBin, Size) ->
{VclockBin, SibBin, LastMods} = riak_metadata_from_binary(ObjBin), {VclockBin, SibBin, LastMods} = riak_metadata_from_binary(ObjBin),
{{SibBin, {{binary:copy(SibBin),
VclockBin, binary:copy(VclockBin),
erlang:phash2(lists:sort(binary_to_term(VclockBin))), erlang:phash2(lists:sort(binary_to_term(VclockBin))),
Size}, Size},
LastMods}. LastMods}.

View file

@ -8,7 +8,8 @@
crossbucket_aae/1, crossbucket_aae/1,
handoff/1, handoff/1,
dollar_bucket_index/1, dollar_bucket_index/1,
dollar_key_index/1 dollar_key_index/1,
bigobject_memorycheck/1
]). ]).
all() -> [ all() -> [
@ -17,7 +18,8 @@ all() -> [
crossbucket_aae, crossbucket_aae,
handoff, handoff,
dollar_bucket_index, dollar_bucket_index,
dollar_key_index dollar_key_index,
bigobject_memorycheck
]. ].
-define(MAGIC, 53). % riak_kv -> riak_object -define(MAGIC, 53). % riak_kv -> riak_object
@ -1208,3 +1210,30 @@ dollar_bucket_index(_Config) ->
ok = leveled_bookie:book_close(Bookie1), ok = leveled_bookie:book_close(Bookie1),
testutil:reset_filestructure(). testutil:reset_filestructure().
%% Regression test: PUT many very large (1MB) objects back-to-back and
%% check the penciller process is not left referencing a large amount of
%% binary memory.  A sub-binary extracted from a received object keeps a
%% reference to the whole parent binary, so without binary:copy/1 of the
%% extracted metadata the penciller would retain every large object here.
bigobject_memorycheck(_Config) ->
    RootPath = testutil:reset_filestructure(),
    {ok, Bookie} = leveled_bookie:book_start(RootPath,
                                                100,
                                                100000000,
                                                testutil:sync_strategy()),
    Bucket = <<"B">>,
    % No index entries are needed for this test
    IndexGen = fun() -> [] end,
    ObjPutFun =
        fun(I) ->
            Key = base64:encode(<<I:32/integer>>),
            Value = leveled_rand:rand_bytes(1024 * 1024),
            % a big object each time!
            {Obj, Spc} = testutil:set_object(Bucket, Key, Value, IndexGen, []),
            testutil:book_riakput(Bookie, Obj, Spc)
        end,
    lists:foreach(ObjPutFun, lists:seq(1, 600)),
    % Use the test-only API to reach the penciller pid inside the bookie
    {ok, _Ink, Pcl} = leveled_bookie:book_returnactors(Bookie),
    {binary, BL} = process_info(Pcl, binary),
    {memory, M0} = process_info(Pcl, memory),
    % Sum the sizes of all off-heap binaries referenced by the penciller
    B0 = lists:foldl(fun({_R, Sz, _C}, Acc) -> Acc + Sz end, 0, BL),
    io:format("Pcl binary memory ~w ~w memory ~w~n", [B0, length(BL), M0]),
    % 600 x 1MB objects were written; both the referenced binary memory and
    % the process memory must stay far below that (< 2,000,000 bytes)
    true = B0 < 500 * 4000,
    true = M0 < 500 * 4000,
    ok = leveled_bookie:book_close(Bookie),
    testutil:reset_filestructure().