Merge branch 'master' into mas-i311-mergeselector

This commit is contained in:
Martin Sumner 2020-03-30 20:07:05 +01:00
commit 9e56bfa947
13 changed files with 859 additions and 239 deletions

View file

@@ -2,11 +2,14 @@
-include_lib("common_test/include/ct.hrl").
-include("include/leveled.hrl").
-export([all/0]).
-export([application_defined_tag/1
-export([
application_defined_tag/1,
bespoketag_recalc/1
]).
all() -> [
application_defined_tag
application_defined_tag,
bespoketag_recalc
].
@@ -62,6 +65,8 @@ application_defined_tag_tester(KeyCount, Tag, Functions, ExpectMD) ->
StartOpts1 = [{root_path, RootPath},
{sync_strategy, testutil:sync_strategy()},
{log_level, warn},
{reload_strategy,
[{bespoke_tag1, retain}, {bespoke_tag2, retain}]},
{override_functions, Functions}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
Value = leveled_rand:rand_bytes(512),
@@ -107,8 +112,6 @@ application_defined_tag_tester(KeyCount, Tag, Functions, ExpectMD) ->
ok = leveled_bookie:book_close(Bookie2).
object_generator(Count, V) ->
Hash = erlang:phash2({count, V}),
@@ -118,4 +121,114 @@ object_generator(Count, V) ->
{Bucket,
Key,
[{hash, Hash}, {shard, Count rem 10},
{random, Random}, {value, V}]}.
{random, Random}, {value, V}]}.
%% Common Test case: exercise journal compaction with the recalc reload
%% strategy on a bespoke (application-defined) tag, then confirm the
%% ledger index entries can be fully rebuilt from the journal alone.
bespoketag_recalc(_Config) ->
    %% Get a sensible behaviour using the recalc compaction strategy with a
    %% bespoke tag
    RootPath = testutil:reset_filestructure(),
    B0 = <<"B0">>,
    KeyCount = 7000,
    %% Override for metadata extraction: returns
    %% {{Hash, Size, {index, IL}}, [LastMod]} - element 3 of the metadata
    %% tuple carries the index list, which CalcIndexFun reads back below.
    ExtractMDFun =
        fun(bespoke_tag, Size, Obj) ->
            [{index, IL}, {value, _V}] = Obj,
            {{erlang:phash2(term_to_binary(Obj)),
                Size,
                {index, IL}},
                [os:timestamp()]}
        end,
    %% Override for index recalculation: diff the updated object's index
    %% list against the previous metadata (or take all indexes when there
    %% is no previous object), and emit the {add, ...} specs to apply.
    CalcIndexFun =
        fun(bespoke_tag, UpdMeta, PrvMeta) ->
            % io:format("UpdMeta ~w PrvMeta ~w~n", [UpdMeta, PrvMeta]),
            {index, UpdIndexes} = element(3, UpdMeta),
            IndexDeltas =
                case PrvMeta of
                    not_present ->
                        UpdIndexes;
                    PrvMeta when is_tuple(PrvMeta) ->
                        {index, PrvIndexes} = element(3, PrvMeta),
                        lists:subtract(UpdIndexes, PrvIndexes)
                end,
            lists:map(fun(I) -> {add, <<"temp_int">>, I} end, IndexDeltas)
        end,
    %% Low max_journalobjectcount forces multiple journal files so that
    %% compaction has work to do; bespoke_tag reloads via recalc, using
    %% the two override functions above.
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalobjectcount, 6000},
                {max_pencillercachesize, 8000},
                {sync_strategy, testutil:sync_strategy()},
                {reload_strategy, [{bespoke_tag, recalc}]},
                {override_functions,
                    [{extract_metadata, ExtractMDFun},
                        {diff_indexspecs, CalcIndexFun}]}],
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    %% Loader: keys wrap at KeyCount (I rem KeyCount), so each later pass
    %% re-writes the same keys with a fresh index value I.  The 9-arity
    %% stdload_object is called with RemovePrev2i = false, so index
    %% entries accrue on the object rather than replacing the old ones;
    %% MustFind asserts whether a previous version must already exist.
    LoadFun =
        fun(Book, MustFind) ->
            fun(I) ->
                testutil:stdload_object(Book,
                    B0, integer_to_binary(I rem KeyCount),
                    I, erlang:phash2({value, I}),
                    infinity, bespoke_tag, false, MustFind)
            end
        end,
    lists:foreach(LoadFun(Book1, false), lists:seq(1, KeyCount)),
    lists:foreach(LoadFun(Book1, true), lists:seq(KeyCount + 1, KeyCount * 2)),
    %% Fold fun counts index entries.  NOTE(review): both branches return
    %% Acc + 1, so this is a plain count either way; the commented-out
    %% io:format suggests the first clause was meant to check sequencing -
    %% confirm the _Unexpected branch is intentionally also counted.
    FoldFun =
        fun(_B0, {IV0, _K0}, Acc) ->
            case IV0 - 1 of
                Acc ->
                    Acc + 1;
                _Unexpected ->
                    % io:format("Eh? - ~w ~w~n", [Unexpected, Acc]),
                    Acc + 1
            end
        end,
    %% Count all <<"temp_int">> index entries in bucket B0 in the range
    %% 0..CurrentCount.
    CountFold =
        fun(Book, CurrentCount) ->
            leveled_bookie:book_indexfold(Book,
                B0,
                {FoldFun, 0},
                {<<"temp_int">>, 0, CurrentCount},
                {true, undefined})
        end,
    %% Two loads of each key -> two index entries per key expected.
    {async, FolderA} = CountFold(Book1, 2 * KeyCount),
    CountA = FolderA(),
    io:format("Counted double index entries ~w - everything loaded OK~n",
        [CountA]),
    true = 2 * KeyCount == CountA,
    ok = leveled_bookie:book_close(Book1),
    %% Restart, load a third round of updates, then compact the journal.
    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    lists:foreach(LoadFun(Book2, true), lists:seq(KeyCount * 2 + 1, KeyCount * 3)),
    {async, FolderB} = CountFold(Book2, 3 * KeyCount),
    CountB = FolderB(),
    true = 3 * KeyCount == CountB,
    testutil:compact_and_wait(Book2),
    ok = leveled_bookie:book_close(Book2),
    %% Wipe the ledger and restart - the ledger (including all index
    %% entries) must be rebuilt from the compacted journal via recalc.
    io:format("Restart from blank ledger~n"),
    leveled_penciller:clean_testdir(proplists:get_value(root_path, BookOpts) ++
        "/ledger"),
    {ok, Book3} = leveled_bookie:book_start(BookOpts),
    {async, FolderC} = CountFold(Book3, 3 * KeyCount),
    CountC = FolderC(),
    io:format("All index entries ~w present - recalc ok~n",
        [CountC]),
    true = 3 * KeyCount == CountC,
    ok = leveled_bookie:book_close(Book3),
    testutil:reset_filestructure().

View file

@@ -7,7 +7,10 @@
hot_backup_simple/1,
hot_backup_changes/1,
retain_strategy/1,
recalc_strategy/1,
recalc_transition_strategy/1,
recovr_strategy/1,
stdtag_recalc/1,
aae_missingjournal/1,
aae_bustedjournal/1,
journal_compaction_bustedjournal/1,
@@ -21,13 +24,16 @@ all() -> [
hot_backup_simple,
hot_backup_changes,
retain_strategy,
recalc_strategy,
recalc_transition_strategy,
recovr_strategy,
aae_missingjournal,
aae_bustedjournal,
journal_compaction_bustedjournal,
close_duringcompaction,
allkeydelta_journal_multicompact,
recompact_keydeltas
recompact_keydeltas,
stdtag_recalc
].
@@ -145,8 +151,6 @@ recovery_with_samekeyupdates(_Config) ->
testutil:reset_filestructure().
hot_backup_simple(_Config) ->
% The journal may have a hot backup. This allows for an online Bookie
% to be sent a message to prepare a backup function, which an asynchronous
@@ -233,85 +237,172 @@ hot_backup_changes(_Config) ->
testutil:reset_filestructure().
retain_strategy(_Config) ->
rotate_wipe_compact(retain, retain).
recalc_strategy(_Config) ->
rotate_wipe_compact(recalc, recalc).
recalc_transition_strategy(_Config) ->
rotate_wipe_compact(retain, recalc).
rotate_wipe_compact(Strategy1, Strategy2) ->
RootPath = testutil:reset_filestructure(),
BookOpts = [{root_path, RootPath},
{cache_size, 1000},
{max_journalobjectcount, 5000},
{sync_strategy, testutil:sync_strategy()},
{reload_strategy, [{?RIAK_TAG, retain}]}],
{reload_strategy, [{?RIAK_TAG, Strategy1}]}],
BookOptsAlt = [{root_path, RootPath},
{cache_size, 1000},
{max_journalobjectcount, 2000},
{sync_strategy, testutil:sync_strategy()},
{reload_strategy, [{?RIAK_TAG, retain}]},
{reload_strategy, [{?RIAK_TAG, Strategy2}]},
{max_run_length, 8}],
{ok, Spcl3, LastV3} = rotating_object_check(BookOpts, "Bucket3", 800),
{ok, Spcl3, LastV3} = rotating_object_check(BookOpts, "Bucket3", 400),
ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3}]),
{ok, Spcl4, LastV4} = rotating_object_check(BookOpts, "Bucket4", 1600),
{ok, Spcl4, LastV4} = rotating_object_check(BookOpts, "Bucket4", 800),
ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3},
{"Bucket4", Spcl4, LastV4}]),
{ok, Spcl5, LastV5} = rotating_object_check(BookOpts, "Bucket5", 3200),
ok = restart_from_blankledger(BookOptsAlt, [{"Bucket3", Spcl3, LastV3},
{"Bucket5", Spcl5, LastV5}]),
{ok, Spcl6, LastV6} = rotating_object_check(BookOpts, "Bucket6", 6400),
{ok, Spcl5, LastV5} = rotating_object_check(BookOpts, "Bucket5", 1600),
ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3},
{"Bucket4", Spcl4, LastV4},
{"Bucket5", Spcl5, LastV5},
{"Bucket6", Spcl6, LastV6}]),
{"Bucket5", Spcl5, LastV5}]),
{ok, Spcl6, LastV6} = rotating_object_check(BookOpts, "Bucket6", 3200),
{ok, Book1} = leveled_bookie:book_start(BookOpts),
compact_and_wait(Book1),
compact_and_wait(Book1),
ok = leveled_bookie:book_close(Book1),
ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3},
ok = restart_from_blankledger(BookOptsAlt, [{"Bucket3", Spcl3, LastV3},
{"Bucket4", Spcl4, LastV4},
{"Bucket5", Spcl5, LastV5},
{"Bucket6", Spcl6, LastV6}]),
{ok, Book2} = leveled_bookie:book_start(BookOptsAlt),
compact_and_wait(Book2),
ok = leveled_bookie:book_close(Book2),
{KSpcL2, _V2} = testutil:put_indexed_objects(Book2, "AltBucket6", 3000),
ok = restart_from_blankledger(BookOptsAlt, [{"Bucket3", Spcl3, LastV3},
{"Bucket4", Spcl4, LastV4},
{"Bucket5", Spcl5, LastV5},
{"Bucket6", Spcl6, LastV6}]),
{ok, Book3} = leveled_bookie:book_start(BookOptsAlt),
{KSpcL2, _V2} = testutil:put_indexed_objects(Book3, "AltBucket6", 3000),
Q2 = fun(RT) -> {index_query,
"AltBucket6",
{fun testutil:foldkeysfun/3, []},
{"idx1_bin", "#", "|"},
{RT, undefined}}
end,
{async, KFolder2A} = leveled_bookie:book_returnfolder(Book2, Q2(false)),
{async, KFolder2A} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
KeyList2A = lists:usort(KFolder2A()),
true = length(KeyList2A) == 3000,
DeleteFun =
fun({DK, [{add, DIdx, DTerm}]}) ->
ok = testutil:book_riakdelete(Book2,
ok = testutil:book_riakdelete(Book3,
"AltBucket6",
DK,
[{remove, DIdx, DTerm}])
end,
lists:foreach(DeleteFun, KSpcL2),
{async, KFolder2AD} = leveled_bookie:book_returnfolder(Book2, Q2(false)),
KeyList2AD = lists:usort(KFolder2AD()),
true = length(KeyList2AD) == 0,
ok = leveled_bookie:book_close(Book2),
{ok, Book3} = leveled_bookie:book_start(BookOptsAlt),
io:format("Compact after deletions~n"),
compact_and_wait(Book3),
compact_and_wait(Book3),
{async, KFolder3AD} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
KeyList3AD = lists:usort(KFolder3AD()),
true = length(KeyList3AD) == 0,
ok = leveled_bookie:book_close(Book3),
{ok, Book4} = leveled_bookie:book_start(BookOptsAlt),
io:format("Compact after deletions~n"),
compact_and_wait(Book4),
{async, KFolder4AD} = leveled_bookie:book_returnfolder(Book4, Q2(false)),
KeyList4AD = lists:usort(KFolder4AD()),
true = length(KeyList4AD) == 0,
ok = leveled_bookie:book_close(Book4),
testutil:reset_filestructure().
%% Common Test case: configure the recalc reload strategy for ?STD_TAG -
%% for which recalc is not supported - and confirm that index entries are
%% consequently LOST when the ledger is rebuilt from the journal.
stdtag_recalc(_Config) ->
    %% Setting the ?STD_TAG to do recalc, should result in the ?STD_TAG
    %% behaving like recovr - as no recalc is done for ?STD_TAG
    %% NOTE -This is a test to confirm bad things happen!
    RootPath = testutil:reset_filestructure(),
    B0 = <<"B0">>,
    KeyCount = 7000,
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalobjectcount, 5000},
                {max_pencillercachesize, 10000},
                {sync_strategy, testutil:sync_strategy()},
                {reload_strategy, [{?STD_TAG, recalc}]}],
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    %% Loader: keys wrap at KeyCount (I rem KeyCount), so later passes
    %% re-write the same keys with a fresh index value I (9-arity
    %% stdload_object, RemovePrev2i = false, MustFind = false).
    LoadFun =
        fun(Book) ->
            fun(I) ->
                testutil:stdload_object(Book,
                    B0, erlang:phash2(I rem KeyCount),
                    I, erlang:phash2({value, I}),
                    infinity, ?STD_TAG, false, false)
            end
        end,
    lists:foreach(LoadFun(Book1), lists:seq(1, KeyCount)),
    lists:foreach(LoadFun(Book1), lists:seq(KeyCount + 1, KeyCount * 2)),
    %% Count <<"temp_int">> index entries in bucket B0 in the range
    %% 0..CurrentCount.
    CountFold =
        fun(Book, CurrentCount) ->
            leveled_bookie:book_indexfold(Book,
                B0,
                {fun(_BF, _KT, Acc) -> Acc + 1 end,
                    0},
                {<<"temp_int">>, 0, CurrentCount},
                {true, undefined})
        end,
    %% Two loads of each key -> two index entries per key expected.
    {async, FolderA} = CountFold(Book1, 2 * KeyCount),
    CountA = FolderA(),
    io:format("Counted double index entries ~w - everything loaded OK~n",
        [CountA]),
    true = 2 * KeyCount == CountA,
    ok = leveled_bookie:book_close(Book1),
    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    lists:foreach(LoadFun(Book2), lists:seq(KeyCount * 2 + 1, KeyCount * 3)),
    {async, FolderB} = CountFold(Book2, 3 * KeyCount),
    CountB = FolderB(),
    io:format("Maybe counted less index entries ~w - everything not loaded~n",
        [CountB]),
    %% Only >= asserted: some entries may already be missing post-restart.
    true = 3 * KeyCount >= CountB,
    compact_and_wait(Book2),
    ok = leveled_bookie:book_close(Book2),
    %% Wipe the ledger and restart - as recalc is not supported for
    %% ?STD_TAG the rebuilt ledger is expected to be missing entries.
    io:format("Restart from blank ledger"),
    leveled_penciller:clean_testdir(proplists:get_value(root_path, BookOpts) ++
        "/ledger"),
    {ok, Book3} = leveled_bookie:book_start(BookOpts),
    {async, FolderC} = CountFold(Book3, 3 * KeyCount),
    CountC = FolderC(),
    io:format("Missing index entries ~w - recalc not supported on ?STD_TAG~n",
        [CountC]),
    %% Strictly fewer entries than were written - loss confirmed.
    true = 3 * KeyCount > CountC,
    ok = leveled_bookie:book_close(Book3),
    testutil:reset_filestructure().
@@ -845,6 +936,10 @@ rotating_object_check(BookOpts, B, NumberOfObjects) ->
B,
KSpcL2,
false),
ok = testutil:check_indexed_objects(Book1,
B,
KSpcL1 ++ KSpcL2 ++ KSpcL3,
V3),
ok = leveled_bookie:book_close(Book1),
{ok, Book2} = leveled_bookie:book_start(BookOpts),
ok = testutil:check_indexed_objects(Book2,

View file

@@ -905,7 +905,7 @@ handoff(_Config) ->
{sync_strategy, sync}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
% Add some noe Riak objects in - which should be ignored in folds.
% Add some none Riak objects in - which should be ignored in folds.
Hashes = testutil:stdload(Bookie1, 1000),
% Generate 200K objects to be used within the test, and load them into
% the first store (outputting the generated objects as a list of lists)

View file

@@ -10,6 +10,7 @@
stdload/2,
stdload_expiring/3,
stdload_object/6,
stdload_object/9,
reset_filestructure/0,
reset_filestructure/1,
check_bucket_stats/2,
@@ -59,7 +60,8 @@
get_value_from_objectlistitem/1,
numbered_key/1,
fixed_bin_key/1,
convert_to_seconds/1]).
convert_to_seconds/1,
compact_and_wait/1]).
-define(RETURN_TERMS, {true, undefined}).
-define(SLOWOFFER_DELAY, 5).
@@ -68,6 +70,7 @@
-define(MD_VTAG, <<"X-Riak-VTag">>).
-define(MD_LASTMOD, <<"X-Riak-Last-Modified">>).
-define(MD_DELETED, <<"X-Riak-Deleted">>).
-define(MD_INDEX, <<"index">>).
-define(EMPTY_VTAG_BIN, <<"e">>).
-define(ROOT_PATH, "test").
@@ -240,17 +243,35 @@ stdload_expiring(Book, KeyCount, TTL, V, Acc) ->
stdload_expiring(Book, KeyCount - 1, TTL, V, [{I, B, K}|Acc]).
stdload_object(Book, B, K, I, V, TTL) ->
Obj = [{index, I}, {value, V}],
IdxSpecs =
case leveled_bookie:book_get(Book, B, K) of
{ok, PrevObj} ->
{index, OldI} = lists:keyfind(index, 1, PrevObj),
io:format("Remove index ~w for ~w~n", [OldI, I]),
[{remove, <<"temp_int">>, OldI}, {add, <<"temp_int">>, I}];
not_found ->
[{add, <<"temp_int">>, I}]
stdload_object(Book, B, K, I, V, TTL, ?STD_TAG, true, false).
stdload_object(Book, B, K, I, V, TTL, Tag, RemovePrev2i, MustFind) ->
Obj = [{index, [I]}, {value, V}],
{IdxSpecs, Obj0} =
case {leveled_bookie:book_get(Book, B, K, Tag), MustFind} of
{{ok, PrevObj}, _} ->
{index, PrevIs} = lists:keyfind(index, 1, PrevObj),
case RemovePrev2i of
true ->
MapFun =
fun(OldI) -> {remove, <<"temp_int">>, OldI} end,
{[{add, <<"temp_int">>, I}|lists:map(MapFun, PrevIs)],
Obj};
false ->
{[{add, <<"temp_int">>, I}],
[{index, [I|PrevIs]}, {value, V}]}
end;
{not_found, false} ->
{[{add, <<"temp_int">>, I}], Obj}
end,
R =
case TTL of
infinity ->
leveled_bookie:book_put(Book, B, K, Obj0, IdxSpecs, Tag);
TTL when is_integer(TTL) ->
leveled_bookie:book_tempput(Book, B, K, Obj0,
IdxSpecs, Tag, TTL)
end,
R = leveled_bookie:book_tempput(Book, B, K, Obj, IdxSpecs, ?STD_TAG, TTL),
case R of
ok ->
ok;
@@ -261,6 +282,7 @@ stdload_object(Book, B, K, I, V, TTL) ->
reset_filestructure() ->
reset_filestructure(0, ?ROOT_PATH).
@@ -517,23 +539,30 @@ set_object(Bucket, Key, Value, IndexGen) ->
set_object(Bucket, Key, Value, IndexGen, []).
set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, []).
set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, IndexesNotToRemove) ->
IdxSpecs = IndexGen(),
Indexes =
lists:map(fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
IdxSpecs ++ IndexesNotToRemove),
Obj = {Bucket,
Key,
Value,
IndexGen() ++ lists:map(fun({add, IdxF, IdxV}) ->
{remove, IdxF, IdxV} end,
Indexes2Remove),
[{"MDK", "MDV" ++ Key},
{"MDK2", "MDV" ++ Key},
{?MD_LASTMOD, os:timestamp()}]},
{B1, K1, V1, Spec1, MD} = Obj,
IdxSpecs ++
lists:map(fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
Indexes2Remove),
[{<<"MDK">>, "MDV" ++ Key},
{<<"MDK2">>, "MDV" ++ Key},
{?MD_LASTMOD, os:timestamp()},
{?MD_INDEX, Indexes}]},
{B1, K1, V1, DeltaSpecs, MD} = Obj,
Content = #r_content{metadata=dict:from_list(MD), value=V1},
{#r_object{bucket=B1,
key=K1,
contents=[Content],
vclock=generate_vclock()},
Spec1}.
DeltaSpecs}.
get_value_from_objectlistitem({_Int, Obj, _Spc}) ->
[Content] = Obj#r_object.contents,
@@ -762,26 +791,39 @@ put_altered_indexed_objects(Book, Bucket, KSpecL) ->
put_altered_indexed_objects(Book, Bucket, KSpecL, true).
put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i) ->
IndexGen = testutil:get_randomindexes_generator(1),
V = testutil:get_compressiblevalue(),
RplKSpecL = lists:map(fun({K, Spc}) ->
AddSpc = if
RemoveOld2i == true ->
[lists:keyfind(add, 1, Spc)];
RemoveOld2i == false ->
[]
end,
{O, AltSpc} = testutil:set_object(Bucket,
K,
V,
IndexGen,
AddSpc),
case book_riakput(Book, O, AltSpc) of
ok -> ok;
pause -> timer:sleep(?SLOWOFFER_DELAY)
end,
{K, AltSpc} end,
KSpecL),
IndexGen = get_randomindexes_generator(1),
V = get_compressiblevalue(),
FindAdditionFun = fun(SpcItem) -> element(1, SpcItem) == add end,
MapFun =
fun({K, Spc}) ->
OldSpecs = lists:filter(FindAdditionFun, Spc),
{RemoveSpc, AddSpc} =
case RemoveOld2i of
true ->
{OldSpecs, []};
false ->
{[], OldSpecs}
end,
{O, DeltaSpecs} =
set_object(Bucket, K, V,
IndexGen, RemoveSpc, AddSpc),
% DeltaSpecs should be new indexes added, and any old indexes which
% have been removed by this change where RemoveOld2i is true.
%
% The actual indexes within the object should reflect any history
% of indexes i.e. when RemoveOld2i is false.
%
% The [{Key, SpecL}] returned should accrue additions over loops if
% RemoveOld2i is false
case book_riakput(Book, O, DeltaSpecs) of
ok -> ok;
pause -> timer:sleep(?SLOWOFFER_DELAY)
end,
% Note that order in the SpecL is important, as
% check_indexed_objects, needs to find the latest item added
{K, DeltaSpecs ++ AddSpc}
end,
RplKSpecL = lists:map(MapFun, KSpecL),
{RplKSpecL, V}.
rotating_object_check(RootPath, B, NumberOfObjects) ->
@@ -790,16 +832,16 @@ rotating_object_check(RootPath, B, NumberOfObjects) ->
{max_journalsize, 5000000},
{sync_strategy, sync_strategy()}],
{ok, Book1} = leveled_bookie:book_start(BookOpts),
{KSpcL1, V1} = testutil:put_indexed_objects(Book1, B, NumberOfObjects),
ok = testutil:check_indexed_objects(Book1, B, KSpcL1, V1),
{KSpcL2, V2} = testutil:put_altered_indexed_objects(Book1, B, KSpcL1),
ok = testutil:check_indexed_objects(Book1, B, KSpcL2, V2),
{KSpcL3, V3} = testutil:put_altered_indexed_objects(Book1, B, KSpcL2),
{KSpcL1, V1} = put_indexed_objects(Book1, B, NumberOfObjects),
ok = check_indexed_objects(Book1, B, KSpcL1, V1),
{KSpcL2, V2} = put_altered_indexed_objects(Book1, B, KSpcL1),
ok = check_indexed_objects(Book1, B, KSpcL2, V2),
{KSpcL3, V3} = put_altered_indexed_objects(Book1, B, KSpcL2),
ok = leveled_bookie:book_close(Book1),
{ok, Book2} = leveled_bookie:book_start(BookOpts),
ok = testutil:check_indexed_objects(Book2, B, KSpcL3, V3),
{KSpcL4, V4} = testutil:put_altered_indexed_objects(Book2, B, KSpcL3),
ok = testutil:check_indexed_objects(Book2, B, KSpcL4, V4),
ok = check_indexed_objects(Book2, B, KSpcL3, V3),
{KSpcL4, V4} = put_altered_indexed_objects(Book2, B, KSpcL3),
ok = check_indexed_objects(Book2, B, KSpcL4, V4),
Query = {keylist, ?RIAK_TAG, B, {fun foldkeysfun/3, []}},
{async, BList} = leveled_bookie:book_returnfolder(Book2, Query),
true = NumberOfObjects == length(BList()),
@@ -839,16 +881,9 @@ restore_topending(RootPath, FileName) ->
find_journals(RootPath) ->
{ok, FNsA_J} = file:list_dir(RootPath ++ "/journal/journal_files"),
{ok, Regex} = re:compile(".*\.cdb"),
CDBFiles = lists:foldl(fun(FN, Acc) -> case re:run(FN, Regex) of
nomatch ->
Acc;
_ ->
[FN|Acc]
end
end,
[],
FNsA_J),
% Must not return a file with the .pnd extension
CDBFiles =
lists:filter(fun(FN) -> filename:extension(FN) == ".cdb" end, FNsA_J),
CDBFiles.
convert_to_seconds({MegaSec, Seconds, _MicroSec}) ->
@@ -862,4 +897,25 @@ get_aae_segment(Obj) ->
get_aae_segment({Type, Bucket}, Key) ->
leveled_tictac:keyto_segment32(<<Type/binary, Bucket/binary, Key/binary>>);
get_aae_segment(Bucket, Key) ->
leveled_tictac:keyto_segment32(<<Bucket/binary, Key/binary>>).
leveled_tictac:keyto_segment32(<<Bucket/binary, Key/binary>>).
%% Request a journal compaction on the Bookie, block until the compaction
%% is no longer reported as pending (or a retry budget is exhausted), then
%% pause to allow journal file deletes to complete.
compact_and_wait(Book) ->
    compact_and_wait(Book, 20000).

compact_and_wait(Book, WaitForDelete) ->
    ok = leveled_bookie:book_compactjournal(Book, 30000),
    wait_while_pending(Book, 1, 15),
    io:format("Waiting for journal deletes~n"),
    timer:sleep(WaitForDelete).

%% Poll the Bookie for compaction completion: up to Max attempts, sleeping
%% 20s between checks, stopping early once compaction is no longer pending.
wait_while_pending(_Book, Attempt, Max) when Attempt > Max ->
    ok;
wait_while_pending(Book, Attempt, Max) ->
    io:format("Loop ~w waiting for journal "
                ++ "compaction to complete~n", [Attempt]),
    timer:sleep(20000),
    case leveled_bookie:book_islastcompactionpending(Book) of
        true ->
            wait_while_pending(Book, Attempt + 1, Max);
        false ->
            ok
    end.