-module(recovery_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("include/leveled.hrl").
-export([all/0]).
-export([
    recovery_with_samekeyupdates/1,
    hot_backup_simple/1,
    hot_backup_changes/1,
    retain_strategy/1,
    recalc_strategy/1,
    recalc_transition_strategy/1,
    recovr_strategy/1,
    stdtag_recalc/1,
    aae_missingjournal/1,
    aae_bustedjournal/1,
    journal_compaction_bustedjournal/1,
    close_duringcompaction/1,
    allkeydelta_journal_multicompact/1,
    recompact_keydeltas/1,
    simple_cachescoring/1
    ]).

all() -> [
    recovery_with_samekeyupdates,
    hot_backup_simple,
    hot_backup_changes,
    retain_strategy,
    recalc_strategy,
    recalc_transition_strategy,
    recovr_strategy,
    aae_missingjournal,
    aae_bustedjournal,
    journal_compaction_bustedjournal,
    close_duringcompaction,
    allkeydelta_journal_multicompact,
    recompact_keydeltas,
    stdtag_recalc,
    simple_cachescoring
    ].

close_duringcompaction(_Config) ->
    % Prompt a compaction, and close immediately - confirm that the close
    % happens without error.
    % This should trigger the iclerk to receive a close during the file
    % scoring stage.
    RootPath = testutil:reset_filestructure(),
    BookOpts = [{root_path, RootPath},
                {cache_size, 2000},
                {max_journalsize, 2000000},
                {sync_strategy, testutil:sync_strategy()}],
    {ok, Spcl1, LastV1} = rotating_object_check(BookOpts, "Bucket1", 6400),
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    ok = leveled_bookie:book_compactjournal(Book1, 30000),
    ok = leveled_bookie:book_close(Book1),
    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    ok = testutil:check_indexed_objects(Book2, "Bucket1", Spcl1, LastV1),
    ok = leveled_bookie:book_close(Book2).

recovery_with_samekeyupdates(_Config) ->
    % Setup to prove https://github.com/martinsumner/leveled/issues/229
    % run a test that involves many updates to the same key, and check that
    % this doesn't cause performance to flatline in either the normal "PUT"
    % case, or in the case of the recovery from a lost keystore
    AcceptableDuration = 180, % 3 minutes
    E2E_SW = os:timestamp(), % Used to track time for overall job

    RootPath = testutil:reset_filestructure(),
    BackupPath = testutil:reset_filestructure("backupSKU"),
    BookOpts = [{root_path, RootPath},
                {cache_size, 2000},
                {max_journalsize, 20000000},
                {sync_strategy, testutil:sync_strategy()}],
    {ok, Book1} = leveled_bookie:book_start(BookOpts),

    % Load in 5K different keys
    % Load 5 keys in 5K times each
    % Load in 5K different keys
    io:format("Commence object generation and load~n"),
    ObjectGen = testutil:get_compressiblevalue_andinteger(),
    IndexGen = fun() -> [] end,
    ObjL1 = testutil:generate_objects(5000,
                                        {fixed_binary, 1},
                                        [],
                                        ObjectGen,
                                        IndexGen,
                                        <<"Bucket1">>),
    testutil:riakload(Book1, ObjL1),
    RepeatedLoadFun =
        fun(_I, _Acc) ->
            ObjRL =
                testutil:generate_objects(5,
                                            {fixed_binary, 5001},
                                            [],
                                            ObjectGen,
                                            IndexGen,
                                            <<"Bucket1">>),
            testutil:riakload(Book1, ObjRL),
            ObjRL
        end,
    FinalObjRL = lists:foldl(RepeatedLoadFun, [], lists:seq(1, 5000)),
    ObjL2 = testutil:generate_objects(5000,
                                        {fixed_binary, 6001},
                                        [],
                                        ObjectGen,
                                        IndexGen,
                                        <<"Bucket1">>),
    testutil:riakload(Book1, ObjL2),

    % Fetch all of ObjL1
    io:format("Check for presence of unique objects~n"),
    ok = testutil:checkhead_forlist(Book1, ObjL1),
    % Fetch all of ObjL2
    ok = testutil:checkhead_forlist(Book1, ObjL2),
    io:format("Check for presence of repeated objects~n"),
    % Fetch repeated objects 200 times each
    CheckFun1 =
        fun(_I) -> ok = testutil:checkhead_forlist(Book1, FinalObjRL) end,
    lists:foreach(CheckFun1, lists:seq(1, 200)),
    io:format("Checks complete~n"),

    io:format("Backup - get journal with no ledger~n"),
    {async, BackupFun} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun(BackupPath),

    io:format("Restarting without key store~n"),
    ok = leveled_bookie:book_close(Book1),

    BookOptsBackup = [{root_path, BackupPath},
                        {cache_size, 2000},
                        {max_journalsize, 20000000},
                        {sync_strategy, testutil:sync_strategy()}],
    {ok, Book2} = leveled_bookie:book_start(BookOptsBackup),

    % Fetch all of ObjL1
    io:format("Check for presence of unique objects~n"),
    ok = testutil:checkhead_forlist(Book2, ObjL1),
    % Fetch all of ObjL2
    ok = testutil:checkhead_forlist(Book2, ObjL2),
    io:format("Check for presence of repeated objects~n"),
    % Fetch repeated objects 200 times each
    CheckFun2 =
        fun(_I) -> ok = testutil:checkhead_forlist(Book2, FinalObjRL) end,
    lists:foreach(CheckFun2, lists:seq(1, 200)),
    io:format("Checks complete from backup~n"),

    DurationOfTest = timer:now_diff(os:timestamp(), E2E_SW)/(1000 * 1000),
    io:format("Duration of test was ~w s~n", [DurationOfTest]),
    true = DurationOfTest < AcceptableDuration,

    ok = leveled_bookie:book_close(Book2),
    testutil:reset_filestructure(BackupPath),
    testutil:reset_filestructure().

hot_backup_simple(_Config) ->
    % The journal may have a hot backup. This allows for an online Bookie
    % to be sent a message to prepare a backup function, which an asynchronous
    % worker can then call to generate a backup taken at the point in time
    % the original message was processed.
    %
    % The basic test is to:
    % 1 - load a Bookie, take a backup, delete the original path, restore from
    % that path
    RootPath = testutil:reset_filestructure(),
    BackupPath = testutil:reset_filestructure("backup0"),
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalsize, 10000000},
                {sync_strategy, testutil:sync_strategy()}],
    {ok, Spcl1, LastV1} = rotating_object_check(BookOpts, "Bucket1", 3200),
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    {async, BackupFun} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun(BackupPath),
    ok = leveled_bookie:book_close(Book1),
    RootPath = testutil:reset_filestructure(),
    BookOptsBackup = [{root_path, BackupPath},
                        {cache_size, 2000},
                        {max_journalsize, 20000000},
                        {sync_strategy, testutil:sync_strategy()}],
    {ok, BookBackup} = leveled_bookie:book_start(BookOptsBackup),
    ok = testutil:check_indexed_objects(BookBackup, "Bucket1", Spcl1, LastV1),
    ok = leveled_bookie:book_close(BookBackup),
    BackupPath = testutil:reset_filestructure("backup0").
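
%% A minimal usage sketch of the hot backup API exercised above. The target
%% path and the use of spawn/1 are illustrative assumptions, not part of the
%% suite; the {async, BackupFun} return means the Bookie is not blocked
%% while the backup is written, so a separate worker can run the fun:
%%
%%   {async, BackupFun} = leveled_bookie:book_hotbackup(Bookie),
%%   spawn(fun() -> ok = BackupFun("/mnt/backups/leveled") end),
%%
%% The backup contains only the journal; starting a bookie with the backup
%% path as its root_path rebuilds the ledger from that journal, as the
%% restart checks above demonstrate.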

hot_backup_changes(_Config) ->
    RootPath = testutil:reset_filestructure(),
    BackupPath = testutil:reset_filestructure("backup0"),
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalsize, 10000000},
                {sync_strategy, testutil:sync_strategy()}],
    B = "Bucket0",

    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    {KSpcL1, _V1} = testutil:put_indexed_objects(Book1, B, 20000),

    {async, BackupFun1} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun1(BackupPath),
    {ok, FileList1} =
        file:list_dir(filename:join(BackupPath, "journal/journal_files/")),

    {KSpcL2, V2} = testutil:put_altered_indexed_objects(Book1, B, KSpcL1),

    {async, BackupFun2} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun2(BackupPath),
    {ok, FileList2} =
        file:list_dir(filename:join(BackupPath, "journal/journal_files/")),

    ok = testutil:check_indexed_objects(Book1, B, KSpcL2, V2),
    compact_and_wait(Book1),

    {async, BackupFun3} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun3(BackupPath),
    {ok, FileList3} =
        file:list_dir(filename:join(BackupPath, "journal/journal_files/")),
    % Confirm null impact of backing up twice in a row
    {async, BackupFun4} = leveled_bookie:book_hotbackup(Book1),
    ok = BackupFun4(BackupPath),
    {ok, FileList4} =
        file:list_dir(filename:join(BackupPath, "journal/journal_files/")),

    true = length(FileList2) > length(FileList1),
    true = length(FileList2) > length(FileList3),
    true = length(FileList3) == length(FileList4),

    ok = leveled_bookie:book_close(Book1),

    RootPath = testutil:reset_filestructure(),
    BookOptsBackup = [{root_path, BackupPath},
                        {cache_size, 2000},
                        {max_journalsize, 20000000},
                        {sync_strategy, testutil:sync_strategy()}],
    {ok, BookBackup} = leveled_bookie:book_start(BookOptsBackup),

    ok = testutil:check_indexed_objects(BookBackup, B, KSpcL2, V2),

    testutil:reset_filestructure("backup0"),
    testutil:reset_filestructure().

retain_strategy(_Config) ->
    rotate_wipe_compact(retain, retain).

recalc_strategy(_Config) ->
    rotate_wipe_compact(recalc, recalc).

recalc_transition_strategy(_Config) ->
    rotate_wipe_compact(retain, recalc).


rotate_wipe_compact(Strategy1, Strategy2) ->
    RootPath = testutil:reset_filestructure(),
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalobjectcount, 5000},
                {sync_strategy, testutil:sync_strategy()},
                {reload_strategy, [{?RIAK_TAG, Strategy1}]}],
    BookOptsAlt = [{root_path, RootPath},
                    {cache_size, 1000},
                    {max_journalobjectcount, 2000},
                    {sync_strategy, testutil:sync_strategy()},
                    {reload_strategy, [{?RIAK_TAG, Strategy2}]},
                    {max_run_length, 8}],
    {ok, Spcl3, LastV3} = rotating_object_check(BookOpts, "Bucket3", 400),
    ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3}]),
    {ok, Spcl4, LastV4} = rotating_object_check(BookOpts, "Bucket4", 800),
    ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3},
                                                {"Bucket4", Spcl4, LastV4}]),
    {ok, Spcl5, LastV5} = rotating_object_check(BookOpts, "Bucket5", 1600),
    ok = restart_from_blankledger(BookOpts, [{"Bucket3", Spcl3, LastV3},
                                                {"Bucket5", Spcl5, LastV5}]),
    {ok, Spcl6, LastV6} = rotating_object_check(BookOpts, "Bucket6", 3200),

    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    compact_and_wait(Book1),
    ok = leveled_bookie:book_close(Book1),

    ok = restart_from_blankledger(BookOptsAlt, [{"Bucket3", Spcl3, LastV3},
                                                {"Bucket4", Spcl4, LastV4},
                                                {"Bucket5", Spcl5, LastV5},
                                                {"Bucket6", Spcl6, LastV6}]),

    {ok, Book2} = leveled_bookie:book_start(BookOptsAlt),
    compact_and_wait(Book2),
    ok = leveled_bookie:book_close(Book2),

    ok = restart_from_blankledger(BookOptsAlt, [{"Bucket3", Spcl3, LastV3},
                                                {"Bucket4", Spcl4, LastV4},
                                                {"Bucket5", Spcl5, LastV5},
                                                {"Bucket6", Spcl6, LastV6}]),

    {ok, Book3} = leveled_bookie:book_start(BookOptsAlt),

    {KSpcL2, _V2} = testutil:put_indexed_objects(Book3, "AltBucket6", 3000),
    Q2 = fun(RT) -> {index_query,
                        "AltBucket6",
                        {fun testutil:foldkeysfun/3, []},
                        {"idx1_bin", "#", "|"},
                        {RT, undefined}}
        end,
    {async, KFolder2A} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
    KeyList2A = lists:usort(KFolder2A()),
    true = length(KeyList2A) == 3000,

    DeleteFun =
        fun({DK, [{add, DIdx, DTerm}]}) ->
            ok = testutil:book_riakdelete(Book3,
                                            "AltBucket6",
                                            DK,
                                            [{remove, DIdx, DTerm}])
        end,
    lists:foreach(DeleteFun, KSpcL2),

    {async, KFolder3AD} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
    KeyList3AD = lists:usort(KFolder3AD()),
    true = length(KeyList3AD) == 0,

    ok = leveled_bookie:book_close(Book3),

    {ok, Book4} = leveled_bookie:book_start(BookOptsAlt),

    io:format("Compact after deletions~n"),

    compact_and_wait(Book4),

    {async, KFolder4AD} = leveled_bookie:book_returnfolder(Book4, Q2(false)),
    KeyList4AD = lists:usort(KFolder4AD()),
    true = length(KeyList4AD) == 0,

    ok = leveled_bookie:book_close(Book4),

    testutil:reset_filestructure().
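
%% The reload_strategy option exercised above is a per-tag proplist mapping
%% a tag to retain, recalc or recovr. A configuration sketch, assuming a
%% Riak-style store (the root_path and other options shown here are
%% illustrative only, not taken from the tests):
%%
%%   BookOpts = [{root_path, "/tmp/leveled"},
%%               {reload_strategy, [{?RIAK_TAG, retain}]},  % or recalc | recovr
%%               {sync_strategy, none}],
%%   {ok, Bookie} = leveled_bookie:book_start(BookOpts).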

stdtag_recalc(_Config) ->
    %% Setting the ?STD_TAG to do recalc, should result in the ?STD_TAG
    %% behaving like recovr - as no recalc is done for ?STD_TAG

    %% NOTE - This is a test to confirm bad things happen!

    RootPath = testutil:reset_filestructure(),
    B0 = <<"B0">>,
    KeyCount = 7000,
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalobjectcount, 5000},
                {max_pencillercachesize, 10000},
                {sync_strategy, testutil:sync_strategy()},
                {reload_strategy, [{?STD_TAG, recalc}]}],
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    LoadFun =
        fun(Book) ->
            fun(I) ->
                testutil:stdload_object(Book,
                                        B0, erlang:phash2(I rem KeyCount),
                                        I, erlang:phash2({value, I}),
                                        infinity, ?STD_TAG, false, false)
            end
        end,
    lists:foreach(LoadFun(Book1), lists:seq(1, KeyCount)),
    lists:foreach(LoadFun(Book1), lists:seq(KeyCount + 1, KeyCount * 2)),

    CountFold =
        fun(Book, CurrentCount) ->
            leveled_bookie:book_indexfold(Book,
                                            B0,
                                            {fun(_BF, _KT, Acc) -> Acc + 1 end,
                                                0},
                                            {<<"temp_int">>, 0, CurrentCount},
                                            {true, undefined})
        end,

    {async, FolderA} = CountFold(Book1, 2 * KeyCount),
    CountA = FolderA(),
    io:format("Counted double index entries ~w - everything loaded OK~n",
                [CountA]),
    true = 2 * KeyCount == CountA,

    ok = leveled_bookie:book_close(Book1),

    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    lists:foreach(LoadFun(Book2), lists:seq(KeyCount * 2 + 1, KeyCount * 3)),

    {async, FolderB} = CountFold(Book2, 3 * KeyCount),
    CountB = FolderB(),
    io:format("Maybe counted fewer index entries ~w - everything not loaded~n",
                [CountB]),
    true = 3 * KeyCount >= CountB,

    compact_and_wait(Book2),
    ok = leveled_bookie:book_close(Book2),

    io:format("Restart from blank ledger~n"),

    leveled_penciller:clean_testdir(proplists:get_value(root_path, BookOpts) ++
                                    "/ledger"),
    {ok, Book3} = leveled_bookie:book_start(BookOpts),

    {async, FolderC} = CountFold(Book3, 3 * KeyCount),
    CountC = FolderC(),
    io:format("Missing index entries ~w - recalc not supported on ?STD_TAG~n",
                [CountC]),
    true = 3 * KeyCount > CountC,

    ok = leveled_bookie:book_close(Book3),

    testutil:reset_filestructure().

recovr_strategy(_Config) ->
    RootPath = testutil:reset_filestructure(),
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalobjectcount, 8000},
                {sync_strategy, testutil:sync_strategy()},
                {reload_strategy, [{?RIAK_TAG, recovr}]}],

    R6 = rotating_object_check(BookOpts, "Bucket6", 6400),
    {ok, AllSpcL, V4} = R6,
    leveled_penciller:clean_testdir(proplists:get_value(root_path, BookOpts) ++
                                    "/ledger"),
    {ok, Book1} = leveled_bookie:book_start(BookOpts),

    {TestObject, TestSpec} = testutil:generate_testobject(),
    ok = testutil:book_riakput(Book1, TestObject, TestSpec),
    ok = testutil:book_riakdelete(Book1,
                                    testutil:get_bucket(TestObject),
                                    testutil:get_key(TestObject),
                                    []),

    lists:foreach(fun({K, _SpcL}) ->
                        {ok, OH} = testutil:book_riakhead(Book1, "Bucket6", K),
                        VCH = testutil:get_vclock(OH),
                        {ok, OG} = testutil:book_riakget(Book1, "Bucket6", K),
                        V = testutil:get_value(OG),
                        VCG = testutil:get_vclock(OG),
                        true = V == V4,
                        true = VCH == VCG
                    end,
                    lists:nthtail(6400, AllSpcL)),
    Q = fun(RT) -> {index_query,
                        "Bucket6",
                        {fun testutil:foldkeysfun/3, []},
                        {"idx1_bin", "#", "|"},
                        {RT, undefined}}
        end,
    {async, TFolder} = leveled_bookie:book_returnfolder(Book1, Q(true)),
    KeyTermList = TFolder(),
    {async, KFolder} = leveled_bookie:book_returnfolder(Book1, Q(false)),
    KeyList = lists:usort(KFolder()),
    io:format("KeyList ~w KeyTermList ~w~n",
                [length(KeyList), length(KeyTermList)]),
    true = length(KeyList) == 6400,
    true = length(KeyList) < length(KeyTermList),
    true = length(KeyTermList) < 25600,

    ok = leveled_bookie:book_close(Book1),

    RevisedOpts = [{root_path, RootPath},
                    {cache_size, 1000},
                    {max_journalobjectcount, 2000},
                    {sync_strategy, testutil:sync_strategy()},
                    {reload_strategy, [{?RIAK_TAG, recovr}]}],

    {ok, Book2} = leveled_bookie:book_start(RevisedOpts),

    {KSpcL2, _V2} = testutil:put_indexed_objects(Book2, "AltBucket6", 3000),
    {async, KFolder2} = leveled_bookie:book_returnfolder(Book2, Q(false)),
    KeyList2 = lists:usort(KFolder2()),
    true = length(KeyList2) == 6400,

    Q2 = fun(RT) -> {index_query,
                        "AltBucket6",
                        {fun testutil:foldkeysfun/3, []},
                        {"idx1_bin", "#", "|"},
                        {RT, undefined}}
        end,
    {async, KFolder2A} = leveled_bookie:book_returnfolder(Book2, Q2(false)),
    KeyList2A = lists:usort(KFolder2A()),
    true = length(KeyList2A) == 3000,

    DeleteFun =
        fun({DK, [{add, DIdx, DTerm}]}) ->
            ok = testutil:book_riakdelete(Book2,
                                            "AltBucket6",
                                            DK,
                                            [{remove, DIdx, DTerm}])
        end,
    lists:foreach(DeleteFun, KSpcL2),

    {async, KFolder2AD} = leveled_bookie:book_returnfolder(Book2, Q2(false)),
    KeyList2AD = lists:usort(KFolder2AD()),
    true = length(KeyList2AD) == 0,

    compact_and_wait(Book2),
    compact_and_wait(Book2),

    ok = leveled_bookie:book_close(Book2),

    {ok, Book3} = leveled_bookie:book_start(RevisedOpts),
    {async, KFolder3AD} = leveled_bookie:book_returnfolder(Book3, Q2(false)),
    KeyList3AD = lists:usort(KFolder3AD()),
    true = length(KeyList3AD) == 0,

    ok = leveled_bookie:book_close(Book3),

    testutil:reset_filestructure().

aae_missingjournal(_Config) ->
    RootPath = testutil:reset_filestructure(),
    StartOpts = [{root_path, RootPath},
                 {max_journalsize, 20000000},
                 {sync_strategy, testutil:sync_strategy()}],
    {ok, Bookie1} = leveled_bookie:book_start(StartOpts),
    {TestObject, TestSpec} = testutil:generate_testobject(),
    ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
    testutil:check_forobject(Bookie1, TestObject),
    GenList = [2],
    _CLs = testutil:load_objects(20000, GenList, Bookie1, TestObject,
                                 fun testutil:generate_objects/2),

    FoldHeadsFun =
        fun(B, K, _V, Acc) -> [{B, K}|Acc] end,

    {async, AllHeadF1} =
        leveled_bookie:book_headfold(Bookie1,
                                     ?RIAK_TAG,
                                     {FoldHeadsFun, []},
                                     true,
                                     true,
                                     false),
    HeadL1 = length(AllHeadF1()),
    io:format("Fold head returned ~w objects~n", [HeadL1]),

    ok = leveled_bookie:book_close(Bookie1),
    CDBFiles = testutil:find_journals(RootPath),
    [HeadF|_Rest] = CDBFiles,
    io:format("Selected Journal for removal of ~s~n", [HeadF]),
    ok = file:delete(RootPath ++ "/journal/journal_files/" ++ HeadF),

    {ok, Bookie2} = leveled_bookie:book_start(StartOpts),
    % Check that fold heads picks up on the missing file
    {async, AllHeadF2} =
        leveled_bookie:book_returnfolder(Bookie2,
                                         {foldheads_allkeys,
                                          ?RIAK_TAG,
                                          FoldHeadsFun,
                                          true, true, false,
                                          false, false}),
    HeadL2 = length(AllHeadF2()),
    io:format("Fold head returned ~w objects~n", [HeadL2]),
    true = HeadL2 < HeadL1,
    true = HeadL2 > 0,

    ok = leveled_bookie:book_close(Bookie2),
    testutil:reset_filestructure().

simple_cachescoring(_Config) ->
    RootPath = testutil:reset_filestructure(),
    StartOpts = [{root_path, RootPath},
                 {max_journalobjectcount, 2000},
                 {sync_strategy, testutil:sync_strategy()}],
    {ok, Bookie1} =
        leveled_bookie:book_start(StartOpts ++
                                  [{journalcompaction_scoreonein, 8}]),
    {TestObject, TestSpec} = testutil:generate_testobject(),
    ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
    testutil:check_forobject(Bookie1, TestObject),
    GenList = [2, 32002, 64002, 96002],
    _CLs = testutil:load_objects(32000, GenList, Bookie1, TestObject,
                                 fun testutil:generate_objects/2),

    F = fun leveled_bookie:book_islastcompactionpending/1,
    WaitForCompaction =
        fun(B) ->
            fun(X, Pending) ->
                case X of
                    1 ->
                        leveled_bookie:book_compactjournal(B, 30000);
                    _ ->
                        ok
                end,
                case Pending of
                    false ->
                        false;
                    true ->
                        io:format("Loop ~w waiting for journal "
                                  ++ "compaction to complete~n", [X]),
                        timer:sleep(100),
                        F(B)
                end
            end
        end,
    io:format("Scoring for first time - every file should need scoring~n"),
    Args1 = [WaitForCompaction(Bookie1), true, lists:seq(1, 300)],
    {TC0, false} = timer:tc(lists, foldl, Args1),
    io:format("Score four more times with cached scoring~n"),
    {TC1, false} = timer:tc(lists, foldl, Args1),
    {TC2, false} = timer:tc(lists, foldl, Args1),
    {TC3, false} = timer:tc(lists, foldl, Args1),
    {TC4, false} = timer:tc(lists, foldl, Args1),

    ok = leveled_bookie:book_close(Bookie1),
    {ok, Bookie2} =
        leveled_bookie:book_start(StartOpts),
    io:format("Re-opened bookie without caching - re-compare compaction time~n"),
    io:format("Scoring for first time - every file should need scoring~n"),
    Args2 = [WaitForCompaction(Bookie2), true, lists:seq(1, 300)],
    {TN0, false} = timer:tc(lists, foldl, Args2),
    io:format("Score four more times with cached scoring~n"),
    {TN1, false} = timer:tc(lists, foldl, Args2),
    {TN2, false} = timer:tc(lists, foldl, Args2),
    {TN3, false} = timer:tc(lists, foldl, Args2),
    {TN4, false} = timer:tc(lists, foldl, Args2),

    AvgSecondRunCache = (TC1 + TC2 + TC3 + TC4) div 4000,
    AvgSecondRunNoCache = (TN1 + TN2 + TN3 + TN4) div 4000,

    io:format("With caching ~w first run ~w average other runs~n",
              [TC0 div 1000, AvgSecondRunCache]),
    io:format("Without caching ~w first run ~w average other runs~n",
              [TN0 div 1000, AvgSecondRunNoCache]),
    true = (TC0 > AvgSecondRunCache),
    true = (TC0/AvgSecondRunCache) > (TN0/AvgSecondRunNoCache),
    ok = leveled_bookie:book_close(Bookie2),

    io:format("Exit having proven simply that caching score is faster~n"),
    testutil:reset_filestructure().
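
%% A sketch of enabling cached compaction scoring as exercised above. With
%% {journalcompaction_scoreonein, N} set, repeated compaction runs can reuse
%% cached file scores rather than fully re-scoring every journal file on each
%% run, which is what the timing comparison in the test demonstrates (the N
%% of 8 mirrors the test; the other options here are illustrative only):
%%
%%   {ok, Bookie} =
%%       leveled_bookie:book_start([{root_path, "/tmp/leveled"},
%%                                  {journalcompaction_scoreonein, 8},
%%                                  {sync_strategy, none}]).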

aae_bustedjournal(_Config) ->
    RootPath = testutil:reset_filestructure(),
    StartOpts = [{root_path, RootPath},
                 {max_journalsize, 20000000},
                 {sync_strategy, testutil:sync_strategy()}],
    {ok, Bookie1} = leveled_bookie:book_start(StartOpts),
    {TestObject, TestSpec} = testutil:generate_testobject(),
    ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
    testutil:check_forobject(Bookie1, TestObject),
    GenList = [2],
    _CLs = testutil:load_objects(16000, GenList, Bookie1, TestObject,
                                 fun testutil:generate_objects/2),
    ok = leveled_bookie:book_close(Bookie1),

    CDBFiles = testutil:find_journals(RootPath),
    [HeadF|_Rest] = CDBFiles,
    % Select the file to corrupt before completing the load - so as
    % not to corrupt the journal required on startup
    {ok, TempB} = leveled_bookie:book_start(StartOpts),
    % Load the remaining objects which may be reloaded on startup due to
    % non-writing of L0
    _CLsAdd = testutil:load_objects(4000,
                                    [16002],
                                    TempB,
                                    TestObject,
                                    fun testutil:generate_objects/2),
    ok = leveled_bookie:book_close(TempB),

    io:format("Selected Journal for corruption of ~s~n", [HeadF]),
    testutil:corrupt_journal(RootPath, HeadF, 1000, 2048, 1000),
    {ok, Bookie2} = leveled_bookie:book_start(StartOpts),

    FoldKeysFun = fun(B, K, Acc) -> [{B, K}|Acc] end,
    AllKeyQuery = {keylist, o_rkv, {FoldKeysFun, []}},
    {async, KeyF} = leveled_bookie:book_returnfolder(Bookie2, AllKeyQuery),
    KeyList = KeyF(),
    20001 = length(KeyList),
    HeadCount = lists:foldl(fun({B, K}, Acc) ->
                                case testutil:book_riakhead(Bookie2,
                                                            B,
                                                            K) of
                                    {ok, _} -> Acc + 1;
                                    not_found -> Acc
                                end
                            end,
                            0,
                            KeyList),
    20001 = HeadCount,
    GetCount = lists:foldl(fun({B, K}, Acc) ->
                                case testutil:book_riakget(Bookie2,
                                                           B,
                                                           K) of
                                    {ok, _} -> Acc + 1;
                                    not_found -> Acc
                                end
                            end,
                            0,
                            KeyList),
    true = GetCount > 19000,
    true = GetCount < HeadCount,

    {async, HashTreeF1} = leveled_bookie:book_returnfolder(Bookie2,
                                                           {hashlist_query,
                                                            ?RIAK_TAG,
                                                            false}),
    KeyHashList1 = HashTreeF1(),
    20001 = length(KeyHashList1),
    {async, HashTreeF2} = leveled_bookie:book_returnfolder(Bookie2,
                                                           {hashlist_query,
                                                            ?RIAK_TAG,
                                                            true}),
    KeyHashList2 = HashTreeF2(),
    % The file is still there, and the hashtree is not corrupted
    KeyHashList2 = KeyHashList1,
    % Will need to remove the file or corrupt the hashtree to get presence to
    % fail

    FoldObjectsFun =
        fun(B, K, V, Acc) ->
            VC = testutil:get_vclock(V),
            H = erlang:phash2(lists:sort(VC)),
            [{B, K, H}|Acc]
        end,
    SW = os:timestamp(),
    {async, HashTreeF3} = leveled_bookie:book_returnfolder(Bookie2,
                                                           {foldobjects_allkeys,
                                                            ?RIAK_TAG,
                                                            FoldObjectsFun,
                                                            false}),
    KeyHashList3 = HashTreeF3(),

    true = length(KeyHashList3) > 19000,
    true = length(KeyHashList3) < HeadCount,
    Delta = length(lists:subtract(KeyHashList1, KeyHashList3)),
    true = Delta < 1001,
    io:format("Fetch of hashtree using fold objects took ~w microseconds" ++
              " and found a Delta of ~w and an objects count of ~w~n",
              [timer:now_diff(os:timestamp(), SW),
               Delta,
               length(KeyHashList3)]),

    ok = leveled_bookie:book_close(Bookie2),
    {ok, BytesCopied} = testutil:restore_file(RootPath, HeadF),
    io:format("File restored is of size ~w~n", [BytesCopied]),
    {ok, Bookie3} = leveled_bookie:book_start(StartOpts),

    SW4 = os:timestamp(),
    {async, HashTreeF4} = leveled_bookie:book_returnfolder(Bookie3,
                                                           {foldobjects_allkeys,
                                                            ?RIAK_TAG,
                                                            FoldObjectsFun,
                                                            false}),
    KeyHashList4 = HashTreeF4(),

    true = length(KeyHashList4) == 20001,
    io:format("Fetch of hashtree using fold objects took ~w microseconds" ++
              " and found an object count of ~w~n",
              [timer:now_diff(os:timestamp(), SW4), length(KeyHashList4)]),

    ok = leveled_bookie:book_close(Bookie3),
    testutil:corrupt_journal(RootPath, HeadF, 500, BytesCopied - 8000, 14),

    {ok, Bookie4} = leveled_bookie:book_start(StartOpts),

    SW5 = os:timestamp(),
    {async, HashTreeF5} = leveled_bookie:book_returnfolder(Bookie4,
                                                           {foldobjects_allkeys,
                                                            ?RIAK_TAG,
                                                            FoldObjectsFun,
                                                            false}),
    KeyHashList5 = HashTreeF5(),

    true = length(KeyHashList5) > 19000,
    true = length(KeyHashList5) < HeadCount,
    Delta5 = length(lists:subtract(KeyHashList1, KeyHashList5)),
    true = Delta5 < 1001,
    io:format("Fetch of hashtree using fold objects took ~w microseconds" ++
              " and found a Delta of ~w and an objects count of ~w~n",
              [timer:now_diff(os:timestamp(), SW5),
               Delta5,
               length(KeyHashList5)]),

    {async, HashTreeF6} = leveled_bookie:book_returnfolder(Bookie4,
                                                           {hashlist_query,
                                                            ?RIAK_TAG,
                                                            true}),
    KeyHashList6 = HashTreeF6(),
    true = length(KeyHashList6) > 19000,
    true = length(KeyHashList6) < HeadCount,

    ok = leveled_bookie:book_close(Bookie4),

    testutil:restore_topending(RootPath, HeadF),

    {ok, Bookie5} = leveled_bookie:book_start(StartOpts),

    SW6 = os:timestamp(),
    {async, HashTreeF7} = leveled_bookie:book_returnfolder(Bookie5,
                                                           {foldobjects_allkeys,
                                                            ?RIAK_TAG,
                                                            FoldObjectsFun,
                                                            false}),
    KeyHashList7 = HashTreeF7(),

    true = length(KeyHashList7) == 20001,
    io:format("Fetch of hashtree using fold objects took ~w microseconds" ++
              " and found an object count of ~w~n",
              [timer:now_diff(os:timestamp(), SW6), length(KeyHashList7)]),

    ok = leveled_bookie:book_close(Bookie5),
    testutil:reset_filestructure().

journal_compaction_bustedjournal(_Config) ->
    % Different circumstances will be created in different runs
    busted_journal_test(10000000, native, on_receipt, true),
    busted_journal_test(7777777, lz4, on_compact, true),
    busted_journal_test(8888888, lz4, on_receipt, true),
    busted_journal_test(7777777, lz4, on_compact, false).


busted_journal_test(MaxJournalSize, PressMethod, PressPoint, Bust) ->
    % Simply confirms that none of this causes a crash
    RootPath = testutil:reset_filestructure(),
    StartOpts1 = [{root_path, RootPath},
                  {max_journalsize, MaxJournalSize},
                  {max_run_length, 10},
                  {sync_strategy, testutil:sync_strategy()},
                  {compression_method, PressMethod},
                  {compression_point, PressPoint}],
    {ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
    {TestObject, TestSpec} = testutil:generate_testobject(),
    ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
    testutil:check_forobject(Bookie1, TestObject),
    ObjList1 = testutil:generate_objects(50000, 2),
    lists:foreach(fun({_RN, Obj, Spc}) ->
                        testutil:book_riakput(Bookie1, Obj, Spc) end,
                  ObjList1),
    %% Now replace all the objects
    ObjList2 = testutil:generate_objects(50000, 2),
    lists:foreach(fun({_RN, Obj, Spc}) ->
                        testutil:book_riakput(Bookie1, Obj, Spc) end,
                  ObjList2),
    ok = leveled_bookie:book_close(Bookie1),

    case Bust of
        true ->
            CDBFiles = testutil:find_journals(RootPath),
            lists:foreach(fun(FN) ->
                                testutil:corrupt_journal(RootPath,
                                                         FN,
                                                         100, 2048, 1000)
                          end,
                          CDBFiles);
        false ->
            ok
    end,

    {ok, Bookie2} = leveled_bookie:book_start(StartOpts1),

    ok = leveled_bookie:book_compactjournal(Bookie2, 30000),
    F = fun leveled_bookie:book_islastcompactionpending/1,
    lists:foldl(fun(X, Pending) ->
                    case Pending of
                        false ->
                            false;
                        true ->
                            io:format("Loop ~w waiting for journal "
                                      ++ "compaction to complete~n", [X]),
                            timer:sleep(20000),
                            F(Bookie2)
                    end end,
                true,
                lists:seq(1, 15)),

    ok = leveled_bookie:book_close(Bookie2),
    testutil:reset_filestructure(10000).

allkeydelta_journal_multicompact(_Config) ->
    RootPath = testutil:reset_filestructure(),
    CompPath = filename:join(RootPath, "journal/journal_files/post_compact"),
    B = <<"test_bucket">>,
    StartOptsFun =
        fun(JOC) ->
            [{root_path, RootPath},
             {max_journalobjectcount, JOC},
             {max_run_length, 4},
             {singlefile_compactionpercentage, 70.0},
             {maxrunlength_compactionpercentage, 85.0},
             {sync_strategy, testutil:sync_strategy()}]
        end,
    {ok, Bookie1} = leveled_bookie:book_start(StartOptsFun(14000)),
    {KSpcL1, _V1} = testutil:put_indexed_objects(Bookie1, B, 24000),
    {KSpcL2, V2} = testutil:put_altered_indexed_objects(Bookie1,
                                                        B,
                                                        KSpcL1,
                                                        false),
    compact_and_wait(Bookie1, 0),
    compact_and_wait(Bookie1, 0),
    {ok, FileList1} = file:list_dir(CompPath),
    io:format("Number of files after compaction ~w~n", [length(FileList1)]),
    compact_and_wait(Bookie1, 0),
    {ok, FileList2} = file:list_dir(CompPath),
    io:format("Number of files after compaction ~w~n", [length(FileList2)]),
    true = FileList1 == FileList2,

    ok = testutil:check_indexed_objects(Bookie1,
                                        B,
                                        KSpcL1 ++ KSpcL2,
                                        V2),

    ok = leveled_bookie:book_close(Bookie1),
    leveled_penciller:clean_testdir(RootPath ++ "/ledger"),
    io:format("Restart without ledger~n"),
    {ok, Bookie2} = leveled_bookie:book_start(StartOptsFun(13000)),

    ok = testutil:check_indexed_objects(Bookie2,
                                        B,
                                        KSpcL1 ++ KSpcL2,
                                        V2),

    {KSpcL3, _V3} = testutil:put_altered_indexed_objects(Bookie2,
                                                         B,
                                                         KSpcL2,
                                                         false),
    compact_and_wait(Bookie2, 0),
    {ok, FileList3} = file:list_dir(CompPath),
    io:format("Number of files after compaction ~w~n", [length(FileList3)]),

    ok = leveled_bookie:book_close(Bookie2),

    io:format("Restart with smaller journal object count~n"),
    {ok, Bookie3} = leveled_bookie:book_start(StartOptsFun(7000)),

    {KSpcL4, V4} = testutil:put_altered_indexed_objects(Bookie3,
                                                        B,
                                                        KSpcL3,
                                                        false),

    compact_and_wait(Bookie3, 0),

    ok = testutil:check_indexed_objects(Bookie3,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3 ++ KSpcL4,
                                        V4),
    {ok, FileList4} = file:list_dir(CompPath),
    io:format("Number of files after compaction ~w~n", [length(FileList4)]),

    ok = leveled_bookie:book_close(Bookie3),

    NewlyCompactedFiles = lists:subtract(FileList4, FileList3),
    true = length(NewlyCompactedFiles) >= 3,
    CDBFilterFun = fun(_K, _V, _P, Acc, _EF) -> {loop, Acc + 1} end,
    CheckLengthFun =
        fun(FN) ->
            {ok, CF} =
                leveled_cdb:cdb_open_reader(filename:join(CompPath, FN)),
            {_LP, TK} =
                leveled_cdb:cdb_scan(CF, CDBFilterFun, 0, undefined),
            io:format("File ~s has ~w keys~n", [FN, TK]),
            true = TK =< 7000
        end,
    lists:foreach(CheckLengthFun, NewlyCompactedFiles),

    testutil:reset_filestructure(10000).

recompact_keydeltas(_Config) ->
    RootPath = testutil:reset_filestructure(),
    B = <<"test_bucket">>,
    StartOptsFun =
        fun(JOC) ->
            [{root_path, RootPath},
             {max_journalobjectcount, JOC},
             {max_run_length, 4},
             {singlefile_compactionpercentage, 70.0},
             {maxrunlength_compactionpercentage, 85.0},
             {sync_strategy, testutil:sync_strategy()}]
        end,
    {ok, Bookie1} = leveled_bookie:book_start(StartOptsFun(45000)),
    {KSpcL1, _V1} = testutil:put_indexed_objects(Bookie1, B, 24000),
    {KSpcL2, _V2} = testutil:put_altered_indexed_objects(Bookie1,
                                                         B,
                                                         KSpcL1,
                                                         false),
    ok = leveled_bookie:book_close(Bookie1),
    {ok, Bookie2} = leveled_bookie:book_start(StartOptsFun(45000)),
    compact_and_wait(Bookie2, 0),
    {KSpcL3, V3} = testutil:put_altered_indexed_objects(Bookie2,
                                                        B,
                                                        KSpcL2,
                                                        false),
    compact_and_wait(Bookie2, 0),
    ok = testutil:check_indexed_objects(Bookie2,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3,
                                        V3),
    ok = leveled_bookie:book_close(Bookie2),
    testutil:reset_filestructure(10000).

rotating_object_check(BookOpts, B, NumberOfObjects) ->
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    {KSpcL1, V1} = testutil:put_indexed_objects(Book1, B, NumberOfObjects),
    ok = testutil:check_indexed_objects(Book1,
                                        B,
                                        KSpcL1,
                                        V1),
    {KSpcL2, V2} = testutil:put_altered_indexed_objects(Book1,
                                                        B,
                                                        KSpcL1,
                                                        false),
    ok = testutil:check_indexed_objects(Book1,
                                        B,
                                        KSpcL1 ++ KSpcL2,
                                        V2),
    {KSpcL3, V3} = testutil:put_altered_indexed_objects(Book1,
                                                        B,
                                                        KSpcL2,
                                                        false),
    ok = testutil:check_indexed_objects(Book1,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3,
                                        V3),
    ok = leveled_bookie:book_close(Book1),
    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    ok = testutil:check_indexed_objects(Book2,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3,
                                        V3),
    {KSpcL4, V4} = testutil:put_altered_indexed_objects(Book2,
                                                        B,
                                                        KSpcL3,
                                                        false),
    io:format("Bucket complete - checking index before compaction~n"),
    ok = testutil:check_indexed_objects(Book2,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3 ++ KSpcL4,
                                        V4),

    compact_and_wait(Book2),

    io:format("Checking index following compaction~n"),
    ok = testutil:check_indexed_objects(Book2,
                                        B,
                                        KSpcL1 ++ KSpcL2 ++ KSpcL3 ++ KSpcL4,
                                        V4),

    ok = leveled_bookie:book_close(Book2),
    {ok, KSpcL1 ++ KSpcL2 ++ KSpcL3 ++ KSpcL4, V4}.

compact_and_wait(Book) ->
    compact_and_wait(Book, 20000).

compact_and_wait(Book, WaitForDelete) ->
    ok = leveled_bookie:book_compactjournal(Book, 30000),
    F = fun leveled_bookie:book_islastcompactionpending/1,
    lists:foldl(fun(X, Pending) ->
                    case Pending of
                        false ->
                            false;
                        true ->
                            io:format("Loop ~w waiting for journal "
                                      ++ "compaction to complete~n", [X]),
                            timer:sleep(20000),
                            F(Book)
                    end end,
                true,
                lists:seq(1, 15)),
    io:format("Waiting for journal deletes~n"),
    timer:sleep(WaitForDelete).

restart_from_blankledger(BookOpts, B_SpcL) ->
    leveled_penciller:clean_testdir(proplists:get_value(root_path, BookOpts) ++
                                    "/ledger"),
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    io:format("Checking index following restart~n"),
    lists:foreach(fun({B, SpcL, V}) ->
                        ok = testutil:check_indexed_objects(Book1, B, SpcL, V)
                  end,
                  B_SpcL),
    ok = leveled_bookie:book_close(Book1),
    ok.