-module(testutil).

-include("../include/leveled.hrl").

-export([book_riakput/3,
         book_riakdelete/4,
         book_riakget/3,
         book_riakhead/3,
         riakload/2,
         stdload/2,
         reset_filestructure/0,
         reset_filestructure/1,
         check_bucket_stats/2,
         checkhead_forlist/2,
         check_forlist/2,
         check_forlist/3,
         check_formissinglist/2,
         check_forobject/2,
         check_formissingobject/3,
         generate_testobject/0,
         generate_testobject/5,
         generate_compressibleobjects/2,
         generate_smallobjects/2,
         generate_objects/2,
         generate_objects/5,
         generate_objects/6,
         set_object/5,
         get_bucket/1,
         get_key/1,
         get_value/1,
         get_vclock/1,
         get_lastmodified/1,
         get_compressiblevalue/0,
         get_compressiblevalue_andinteger/0,
         get_randomindexes_generator/1,
         name_list/0,
         load_objects/5,
         load_objects/6,
         update_some_objects/3,
         delete_some_objects/3,
         put_indexed_objects/3,
         put_altered_indexed_objects/3,
         put_altered_indexed_objects/4,
         check_indexed_objects/4,
         rotating_object_check/3,
         corrupt_journal/5,
         restore_file/2,
         restore_topending/2,
         find_journals/1,
         wait_for_compaction/1,
         foldkeysfun/3,
         foldkeysfun_returnbucket/3,
         sync_strategy/0,
         riak_object/4,
         get_value_from_objectlistitem/1,
         numbered_key/1,
         fixed_bin_key/1,
         convert_to_seconds/1]).
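
%% Utility functions shared across the leveled common test suites:
%% object generation, bulk load helpers, and checks against the store.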

-define(RETURN_TERMS, {true, undefined}).
-define(SLOWOFFER_DELAY, 5).
-define(V1_VERS, 1).
-define(MAGIC, 53). % riak_kv -> riak_object
-define(MD_VTAG, <<"X-Riak-VTag">>).
-define(MD_LASTMOD, <<"X-Riak-Last-Modified">>).
-define(MD_DELETED, <<"X-Riak-Deleted">>).
-define(EMPTY_VTAG_BIN, <<"e">>).
-define(ROOT_PATH, "test").

riak_object(Bucket, Key, Value, MetaData) ->
    Content = #r_content{metadata=dict:from_list(MetaData), value=Value},
    Obj = #r_object{bucket=Bucket,
                    key=Key,
                    contents=[Content],
                    vclock=generate_vclock()},
    to_binary(v1, Obj).
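
%% Example use (illustrative only - the bucket/key/metadata values here
%% are arbitrary):
%%   Bin = testutil:riak_object(<<"B">>, <<"K">>, <<"V">>,
%%                              [{<<"MDK1">>, <<"MDV1">>}]),
%% Bin is the riak_object v1 binary encoding, which can then be stored
%% via leveled_bookie:book_put/6 with the ?RIAK_TAG.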

%% =================================================
%% From riak_object

to_binary(v1, #r_object{contents=Contents, vclock=VClock}) ->
    new_v1(VClock, Contents).

new_v1(Vclock, Siblings) ->
    VclockBin = term_to_binary(Vclock),
    VclockLen = byte_size(VclockBin),
    SibCount = length(Siblings),
    SibsBin = bin_contents(Siblings),
    <<?MAGIC:8/integer, ?V1_VERS:8/integer, VclockLen:32/integer,
        VclockBin/binary, SibCount:32/integer, SibsBin/binary>>.
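
%% The v1 object binary is therefore laid out as:
%%   <<Magic:8, Vers:8, VclockLen:32, VclockBin:VclockLen/binary,
%%     SibCount:32, SibsBin/binary>>
%% with each sibling encoded by bin_content/1 below.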

bin_content(#r_content{metadata=Meta, value=Val}) ->
    ValBin = encode_maybe_binary(Val),
    ValLen = byte_size(ValBin),
    MetaBin = meta_bin(Meta),
    MetaLen = byte_size(MetaBin),
    <<ValLen:32/integer, ValBin:ValLen/binary,
        MetaLen:32/integer, MetaBin:MetaLen/binary>>.

bin_contents(Contents) ->
    F = fun(Content, Acc) ->
            <<Acc/binary, (bin_content(Content))/binary>>
        end,
    lists:foldl(F, <<>>, Contents).

meta_bin(MD) ->
    {{VTagVal, Deleted, LastModVal}, RestBin} =
        dict:fold(fun fold_meta_to_bin/3,
                    {{undefined, <<0>>, undefined}, <<>>},
                    MD),
    VTagBin = case VTagVal of
                    undefined -> ?EMPTY_VTAG_BIN;
                    _ -> list_to_binary(VTagVal)
                end,
    VTagLen = byte_size(VTagBin),
    LastModBin = case LastModVal of
                        undefined ->
                            <<0:32/integer, 0:32/integer, 0:32/integer>>;
                        {Mega, Secs, Micro} ->
                            <<Mega:32/integer, Secs:32/integer, Micro:32/integer>>
                    end,
    <<LastModBin/binary, VTagLen:8/integer, VTagBin:VTagLen/binary,
        Deleted:1/binary-unit:8, RestBin/binary>>.
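
%% Well-known metadata entries (vtag, last-modified, deleted flag) are
%% lifted into a fixed-position header; all other metadata keys are
%% appended as length-prefixed key/value pairs by fold_meta_to_bin/3.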

fold_meta_to_bin(?MD_VTAG, Value, {{_Vt, Del, Lm}, RestBin}) ->
    {{Value, Del, Lm}, RestBin};
fold_meta_to_bin(?MD_LASTMOD, Value, {{Vt, Del, _Lm}, RestBin}) ->
    {{Vt, Del, Value}, RestBin};
fold_meta_to_bin(?MD_DELETED, true, {{Vt, _Del, Lm}, RestBin}) ->
    {{Vt, <<1>>, Lm}, RestBin};
fold_meta_to_bin(?MD_DELETED, "true", Acc) ->
    fold_meta_to_bin(?MD_DELETED, true, Acc);
fold_meta_to_bin(?MD_DELETED, _, {{Vt, _Del, Lm}, RestBin}) ->
    {{Vt, <<0>>, Lm}, RestBin};
fold_meta_to_bin(Key, Value, {{_Vt, _Del, _Lm}=Elems, RestBin}) ->
    ValueBin = encode_maybe_binary(Value),
    ValueLen = byte_size(ValueBin),
    KeyBin = encode_maybe_binary(Key),
    KeyLen = byte_size(KeyBin),
    MetaBin = <<KeyLen:32/integer, KeyBin/binary,
                ValueLen:32/integer, ValueBin/binary>>,
    {Elems, <<RestBin/binary, MetaBin/binary>>}.

encode_maybe_binary(Bin) when is_binary(Bin) ->
    <<1, Bin/binary>>;
encode_maybe_binary(Bin) ->
    <<0, (term_to_binary(Bin))/binary>>.

%% =================================================
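
%% encode_maybe_binary/1 prefixes a tag byte: 1 when the value is already
%% a binary, 0 when it has been passed through term_to_binary/1, so a
%% decoder (see get_value/1 below) knows which path to reverse.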

sync_strategy() ->
    case erlang:system_info(otp_release) of
        "17" ->
            sync;
        "18" ->
            sync;
        "19" ->
            sync;
        _ ->
            % Running the sync strategy on older releases (e.g. OTP 16)
            % on a MacBook is super slow, so revert to no sync. Note
            % this catch-all also applies to releases newer than 19.
            none
    end.

book_riakput(Pid, RiakObject, IndexSpecs) ->
    leveled_bookie:book_put(Pid,
                            RiakObject#r_object.bucket,
                            RiakObject#r_object.key,
                            to_binary(v1, RiakObject),
                            IndexSpecs,
                            ?RIAK_TAG).

book_riakdelete(Pid, Bucket, Key, IndexSpecs) ->
    leveled_bookie:book_put(Pid, Bucket, Key, delete, IndexSpecs, ?RIAK_TAG).

book_riakget(Pid, Bucket, Key) ->
    leveled_bookie:book_get(Pid, Bucket, Key, ?RIAK_TAG).

book_riakhead(Pid, Bucket, Key) ->
    leveled_bookie:book_head(Pid, Bucket, Key, ?RIAK_TAG).
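
%% Thin wrappers around the bookie API using ?RIAK_TAG throughout. Note
%% that a delete is expressed as a put of the atom delete, with the index
%% specs still supplied so index entries can be removed alongside.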

riakload(Bookie, ObjectList) ->
    lists:foreach(fun({_RN, Obj, Spc}) ->
                        R = book_riakput(Bookie, Obj, Spc),
                        case R of
                            ok -> ok;
                            pause -> timer:sleep(?SLOWOFFER_DELAY)
                        end
                        end,
                    ObjectList).
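
%% Typical use (a sketch - the object count and key type are arbitrary):
%%   ObjL = testutil:generate_objects(1000, uuid),
%%   ok = testutil:riakload(Bookie, ObjL),
%% A pause response from the bookie is absorbed here by sleeping for
%% ?SLOWOFFER_DELAY ms before offering the next object.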

stdload(Bookie, Count) ->
    stdload(Bookie, Count, []).

stdload(_Bookie, 0, Acc) ->
    Acc;
stdload(Bookie, Count, Acc) ->
    B = "Bucket",
    K = leveled_util:generate_uuid(),
    V = get_compressiblevalue(),
    R = leveled_bookie:book_put(Bookie, B, K, V, [], ?STD_TAG),
    case R of
        ok -> ok;
        pause -> timer:sleep(?SLOWOFFER_DELAY)
    end,
    stdload(Bookie, Count - 1, [{B, K, erlang:phash2(V)}|Acc]).
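
%% stdload/2 writes Count objects under ?STD_TAG and returns a list of
%% {Bucket, Key, Hash} tuples, where Hash is the erlang:phash2/1 of the
%% stored value, so callers can verify values fetched back later.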

reset_filestructure() ->
    reset_filestructure(0, ?ROOT_PATH).

reset_filestructure(Wait) when is_integer(Wait) ->
    reset_filestructure(Wait, ?ROOT_PATH);
reset_filestructure(RootPath) when is_list(RootPath) ->
    reset_filestructure(0, RootPath).

reset_filestructure(Wait, RootPath) ->
    io:format("Waiting ~w ms to give a chance for all file closes " ++
                "to complete~n", [Wait]),
    timer:sleep(Wait),
    filelib:ensure_dir(RootPath ++ "/journal/"),
    filelib:ensure_dir(RootPath ++ "/ledger/"),
    leveled_inker:clean_testdir(RootPath ++ "/journal"),
    leveled_penciller:clean_testdir(RootPath ++ "/ledger"),
    RootPath.

wait_for_compaction(Bookie) ->
    F = fun leveled_bookie:book_islastcompactionpending/1,
    lists:foldl(fun(X, Pending) ->
                        case Pending of
                            false ->
                                false;
                            true ->
                                io:format("Loop ~w waiting for journal "
                                    ++ "compaction to complete~n", [X]),
                                timer:sleep(5000),
                                F(Bookie)
                        end end,
                    true,
                    lists:seq(1, 15)).
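
%% Polls the bookie up to 15 times at 5s intervals. Returns false once
%% journal compaction is no longer pending, or true if it is still
%% pending after all attempts.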

check_bucket_stats(Bookie, Bucket) ->
    FoldSW1 = os:timestamp(),
    io:format("Checking bucket size~n"),
    {async, Folder1} = leveled_bookie:book_returnfolder(Bookie,
                                                        {riakbucket_stats,
                                                            Bucket}),
    {B1Size, B1Count} = Folder1(),
    io:format("Bucket fold completed in ~w microseconds~n",
                [timer:now_diff(os:timestamp(), FoldSW1)]),
    io:format("Bucket ~s has size ~w and count ~w~n",
                [Bucket, B1Size, B1Count]),
    {B1Size, B1Count}.

check_forlist(Bookie, ChkList) ->
    check_forlist(Bookie, ChkList, false).

check_forlist(Bookie, ChkList, Log) ->
    SW = os:timestamp(),
    lists:foreach(fun({_RN, Obj, _Spc}) ->
                    if
                        Log == true ->
                            io:format("Fetching Key ~s~n", [Obj#r_object.key]);
                        true ->
                            ok
                    end,
                    R = book_riakget(Bookie,
                                        Obj#r_object.bucket,
                                        Obj#r_object.key),
                    true = case R of
                                {ok, Val} ->
                                    to_binary(v1, Obj) == Val;
                                not_found ->
                                    io:format("Object not found for key ~s~n",
                                                [Obj#r_object.key]),
                                    error
                            end
                    end,
                    ChkList),
    io:format("Fetch check took ~w microseconds checking list of length ~w~n",
                [timer:now_diff(os:timestamp(), SW), length(ChkList)]).

checkhead_forlist(Bookie, ChkList) ->
    SW = os:timestamp(),
    lists:foreach(fun({_RN, Obj, _Spc}) ->
                    R = book_riakhead(Bookie,
                                        Obj#r_object.bucket,
                                        Obj#r_object.key),
                    true = case R of
                                {ok, _Head} ->
                                    true;
                                not_found ->
                                    io:format("Object not found for key ~s~n",
                                                [Obj#r_object.key]),
                                    error
                            end
                    end,
                    ChkList),
    io:format("Head check took ~w microseconds checking list of length ~w~n",
                [timer:now_diff(os:timestamp(), SW), length(ChkList)]).

check_formissinglist(Bookie, ChkList) ->
    SW = os:timestamp(),
    lists:foreach(fun({_RN, Obj, _Spc}) ->
                    R = book_riakget(Bookie,
                                        Obj#r_object.bucket,
                                        Obj#r_object.key),
                    R = not_found end,
                    ChkList),
    io:format("Miss check took ~w microseconds checking list of length ~w~n",
                [timer:now_diff(os:timestamp(), SW), length(ChkList)]).

check_forobject(Bookie, TestObject) ->
    TestBinary = to_binary(v1, TestObject),
    {ok, TestBinary} = book_riakget(Bookie,
                                    TestObject#r_object.bucket,
                                    TestObject#r_object.key),
    {ok, HeadBinary} = book_riakhead(Bookie,
                                        TestObject#r_object.bucket,
                                        TestObject#r_object.key),
    {{_SibMetaBin, Vclock, _Hash, size}, _LMS}
        = leveled_codec:riak_extract_metadata(HeadBinary, size),
    true = binary_to_term(Vclock) == TestObject#r_object.vclock.

check_formissingobject(Bookie, Bucket, Key) ->
    not_found = book_riakget(Bookie, Bucket, Key),
    not_found = book_riakhead(Bookie, Bucket, Key).
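
%% check_forobject/2 asserts both paths: the GET returns the exact v1
%% binary, and the HEAD returns metadata whose vclock matches the one on
%% the test object.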

generate_testobject() ->
    {B1, K1, V1, Spec1, MD} = {"Bucket1",
                                "Key1",
                                "Value1",
                                [],
                                [{"MDK1", "MDV1"}]},
    generate_testobject(B1, K1, V1, Spec1, MD).

generate_testobject(B, K, V, Spec, MD) ->
    MD0 = [{?MD_LASTMOD, os:timestamp()}|MD],
    Content = #r_content{metadata=dict:from_list(MD0), value=V},
    {#r_object{bucket=B,
                key=K,
                contents=[Content],
                vclock=generate_vclock()},
        Spec}.

generate_compressibleobjects(Count, KeyNumber) ->
    V = get_compressiblevalue(),
    generate_objects(Count, KeyNumber, [], V).

get_compressiblevalue_andinteger() ->
    {leveled_rand:uniform(1000), get_compressiblevalue()}.

get_compressiblevalue() ->
    S1 = "111111111111111",
    S2 = "222222222222222",
    S3 = "333333333333333",
    S4 = "aaaaaaaaaaaaaaa",
    S5 = "AAAAAAAAAAAAAAA",
    S6 = "GGGGGGGGGGGGGGG",
    S7 = "===============",
    S8 = "...............",
    Selector = [{1, S1}, {2, S2}, {3, S3}, {4, S4},
                {5, S5}, {6, S6}, {7, S7}, {8, S8}],
    L = lists:seq(1, 1024),
    lists:foldl(fun(_X, Acc) ->
                    {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
                    Acc ++ Str end,
                "",
                L).
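
%% Builds a ~15KB string from 1024 randomly selected 15-character runs.
%% The long repeated runs make the value compress well, which is the
%% point when exercising journal compression.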

generate_smallobjects(Count, KeyNumber) ->
    generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(512)).

generate_objects(Count, KeyNumber) ->
    generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(4096)).

generate_objects(Count, KeyNumber, ObjL, Value) ->
    generate_objects(Count, KeyNumber, ObjL, Value, fun() -> [] end).

generate_objects(Count, KeyNumber, ObjL, Value, IndexGen) ->
    generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, "Bucket").

generate_objects(0, _KeyNumber, ObjL, _Value, _IndexGen, _Bucket) ->
    lists:reverse(ObjL);
generate_objects(Count, binary_uuid, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} = set_object(list_to_binary(Bucket),
                                list_to_binary(leveled_util:generate_uuid()),
                                Value,
                                IndexGen),
    generate_objects(Count - 1,
                        binary_uuid,
                        [{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, uuid, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} = set_object(Bucket,
                                leveled_util:generate_uuid(),
                                Value,
                                IndexGen),
    generate_objects(Count - 1,
                        uuid,
                        [{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} =
        set_object(list_to_binary(Bucket),
                    list_to_binary(numbered_key(KeyNumber)),
                    Value,
                    IndexGen),
    generate_objects(Count - 1,
                        {binary, KeyNumber + 1},
                        [{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, {fixed_binary, KeyNumber}, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} =
        set_object(Bucket,
                    fixed_bin_key(KeyNumber),
                    Value,
                    IndexGen),
    generate_objects(Count - 1,
                        {fixed_binary, KeyNumber + 1},
                        [{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} = set_object(Bucket,
                                numbered_key(KeyNumber),
                                Value,
                                IndexGen),
    generate_objects(Count - 1,
                        KeyNumber + 1,
                        [{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket).
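
%% The second argument selects the key scheme: uuid / binary_uuid for
%% random keys, {binary, N} / {fixed_binary, N} / a plain integer N for
%% sequentially numbered keys. Each returned element is a
%% {RandomFloat, Object, IndexSpecs} tuple - sorting the list on the
%% random float yields a shuffled order (see update_some_objects/3).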

%% @doc generates a key, exported so tests can use it without copying
%% code
-spec numbered_key(integer()) -> list().
numbered_key(KeyNumber) when is_integer(KeyNumber) ->
    "Key" ++ integer_to_list(KeyNumber).

%% @doc generates a key for `KeyNumber' with a fixed-size (64-bit integer)
%% suffix; again, exported so tests can generate the same keys as
%% generate_objects/N without peeking.
-spec fixed_bin_key(integer()) -> binary().
fixed_bin_key(KeyNumber) ->
    <<$K, $e, $y, KeyNumber:64/integer>>.
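
%% For example, fixed_bin_key(1) yields the 11-byte key
%%   <<"Key", 1:64/integer>> = <<75,101,121,0,0,0,0,0,0,0,1>>.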

set_object(Bucket, Key, Value, IndexGen) ->
    set_object(Bucket, Key, Value, IndexGen, []).

set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
    Obj = {Bucket,
            Key,
            Value,
            IndexGen() ++ lists:map(fun({add, IdxF, IdxV}) ->
                                        {remove, IdxF, IdxV} end,
                                    Indexes2Remove),
            [{"MDK", "MDV" ++ Key},
                {"MDK2", "MDV" ++ Key},
                {?MD_LASTMOD, os:timestamp()}]},
    {B1, K1, V1, Spec1, MD} = Obj,
    Content = #r_content{metadata=dict:from_list(MD), value=V1},
    {#r_object{bucket=B1,
                key=K1,
                contents=[Content],
                vclock=generate_vclock()},
        Spec1}.
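
%% Any {add, Field, Value} specs passed in Indexes2Remove are flipped to
%% {remove, Field, Value}, so replacing an object cleans up the index
%% entries written by its previous version.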

get_value_from_objectlistitem({_Int, Obj, _Spc}) ->
    [Content] = Obj#r_object.contents,
    Content#r_content.value.

update_some_objects(Bookie, ObjList, SampleSize) ->
    StartWatchA = os:timestamp(),
    ToUpdateList = lists:sublist(lists:sort(ObjList), SampleSize),
    UpdateFun =
        fun({R, Obj, Spec}) ->
            VC = Obj#r_object.vclock,
            VC0 = update_vclock(VC),
            [C] = Obj#r_object.contents,
            MD = C#r_content.metadata,
            MD0 = dict:store(?MD_LASTMOD, os:timestamp(), MD),
            C0 = C#r_content{value = leveled_rand:rand_bytes(512),
                                metadata = MD0},
            UpdObj = Obj#r_object{vclock = VC0, contents = [C0]},
            {R, UpdObj, Spec}
        end,
    UpdatedObjList = lists:map(UpdateFun, ToUpdateList),
    riakload(Bookie, UpdatedObjList),
    Time = timer:now_diff(os:timestamp(), StartWatchA),
    io:format("~w objects updated in ~w seconds~n",
                [SampleSize, Time/1000000]).

delete_some_objects(Bookie, ObjList, SampleSize) ->
    StartWatchA = os:timestamp(),
    ToDeleteList = lists:sublist(lists:sort(ObjList), SampleSize),
    DeleteFun =
        fun({_R, Obj, Spec}) ->
            B = Obj#r_object.bucket,
            K = Obj#r_object.key,
            book_riakdelete(Bookie, B, K, Spec)
        end,
    lists:foreach(DeleteFun, ToDeleteList),
    Time = timer:now_diff(os:timestamp(), StartWatchA),
    io:format("~w objects deleted in ~w seconds~n",
                [SampleSize, Time/1000000]).

generate_vclock() ->
    lists:map(fun(X) ->
                    {_, Actor} = lists:keyfind(leveled_rand:uniform(10),
                                                1,
                                                actor_list()),
                    {Actor, X} end,
                lists:seq(1, leveled_rand:uniform(8))).

update_vclock(VC) ->
    [{Actor, X}|Rest] = VC,
    [{Actor, X + 1}|Rest].

actor_list() ->
    [{1, albert}, {2, bertie}, {3, clara}, {4, dave}, {5, elton},
        {6, fred}, {7, george}, {8, harry}, {9, isaac}, {10, leila}].
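
%% generate_vclock/0 produces between 1 and 8 {Actor, Counter} entries
%% with randomly chosen actors; update_vclock/1 bumps the counter of the
%% first entry, mimicking a subsequent update by that actor.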

get_bucket(Object) ->
    Object#r_object.bucket.

get_key(Object) ->
    Object#r_object.key.

get_value(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        Rest1/binary>> = ObjectBin,
    <<_VclockBin:VclockLen/binary, SibCount:32/integer, SibsBin/binary>> = Rest1,
    case SibCount of
        1 ->
            <<SibLength:32/integer, Rest2/binary>> = SibsBin,
            <<ContentBin:SibLength/binary, _MetaBin/binary>> = Rest2,
            case ContentBin of
                <<0, ContentBin0/binary>> ->
                    binary_to_term(ContentBin0)
            end;
        N ->
            io:format("SibCount of ~w with ObjectBin ~w~n", [N, ObjectBin]),
            error
    end.

get_lastmodified(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        Rest1/binary>> = ObjectBin,
    <<_VclockBin:VclockLen/binary, SibCount:32/integer, SibsBin/binary>> = Rest1,
    case SibCount of
        1 ->
            <<SibLength:32/integer, Rest2/binary>> = SibsBin,
            <<_ContentBin:SibLength/binary,
                MetaLength:32/integer,
                MetaBin:MetaLength/binary,
                _Rest3/binary>> = Rest2,
            <<MegaSec:32/integer,
                Sec:32/integer,
                MicroSec:32/integer,
                _RestMetaBin/binary>> = MetaBin,
            {MegaSec, Sec, MicroSec}
    end.

get_vclock(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        Rest1/binary>> = ObjectBin,
    <<VclockBin:VclockLen/binary, _Bin/binary>> = Rest1,
    binary_to_term(VclockBin).
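
%% These three accessors walk the v1 binary produced by new_v1/2: the
%% vclock is the term-encoded block straight after the two header bytes
%% and its length field, and the last-modified triple sits at the head of
%% the metadata block (see meta_bin/1).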

load_objects(ChunkSize, GenList, Bookie, TestObject, Generator) ->
    load_objects(ChunkSize, GenList, Bookie, TestObject, Generator, 1000).

load_objects(ChunkSize, GenList, Bookie, TestObject, Generator, SubListL) ->
    lists:map(fun(KN) ->
                    ObjListA = Generator(ChunkSize, KN),
                    StartWatchA = os:timestamp(),
                    riakload(Bookie, ObjListA),
                    Time = timer:now_diff(os:timestamp(), StartWatchA),
                    io:format("~w objects loaded in ~w seconds~n",
                                [ChunkSize, Time/1000000]),
                    if
                        TestObject == no_check ->
                            ok;
                        true ->
                            check_forobject(Bookie, TestObject)
                    end,
                    lists:sublist(ObjListA, SubListL) end,
                GenList).
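
%% Typical use (a sketch - the chunk size and key numbers are arbitrary):
%%   load_objects(5000, [1, 5001, 10001], Bookie, TestObj,
%%                fun testutil:generate_smallobjects/2),
%% loads three chunks of 5000 objects, re-checking TestObj after each
%% chunk (unless no_check is passed), and returns the first SubListL
%% objects of each chunk for later checks.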

get_randomindexes_generator(Count) ->
    Generator = fun() ->
                    lists:map(fun(X) ->
                                    {add,
                                        "idx" ++ integer_to_list(X) ++ "_bin",
                                        get_randomdate() ++ get_randomname()} end,
                                lists:seq(1, Count))
                    end,
    Generator.

name_list() ->
    [{1, "Sophia"}, {2, "Emma"}, {3, "Olivia"}, {4, "Ava"},
        {5, "Isabella"}, {6, "Mia"}, {7, "Zoe"}, {8, "Lily"},
        {9, "Emily"}, {10, "Madelyn"}, {11, "Madison"}, {12, "Chloe"},
        {13, "Charlotte"}, {14, "Aubrey"}, {15, "Avery"},
        {16, "Abigail"}].

get_randomname() ->
    NameList = name_list(),
    N = leveled_rand:uniform(16),
    {N, Name} = lists:keyfind(N, 1, NameList),
    Name.

get_randomdate() ->
    LowTime = 60000000000,
    HighTime = 70000000000,
    RandPoint = LowTime + leveled_rand:uniform(HighTime - LowTime),
    Date = calendar:gregorian_seconds_to_datetime(RandPoint),
    {{Year, Month, Day}, {Hour, Minute, Second}} = Date,
    lists:flatten(io_lib:format("~4..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
                                    [Year, Month, Day, Hour, Minute, Second])).

foldkeysfun(_Bucket, Item, Acc) -> [Item|Acc].

foldkeysfun_returnbucket(Bucket, {Term, Key}, Acc) ->
    [{Term, {Bucket, Key}}|Acc];
foldkeysfun_returnbucket(Bucket, Key, Acc) ->
    [{Bucket, Key}|Acc].

check_indexed_objects(Book, B, KSpecL, V) ->
    % Check all objects match, return what should be the results of an all
    % index query
    IdxR = lists:map(fun({K, Spc}) ->
                            {ok, O} = book_riakget(Book, B, K),
                            V = testutil:get_value(O),
                            {add,
                                "idx1_bin",
                                IdxVal} = lists:keyfind(add, 1, Spc),
                            {IdxVal, K} end,
                        KSpecL),
    % Check the all index query matches expectations
    R = leveled_bookie:book_returnfolder(Book,
                                            {index_query,
                                                B,
                                                {fun foldkeysfun/3, []},
                                                {"idx1_bin",
                                                    "0",
                                                    "|"},
                                                ?RETURN_TERMS}),
    SW = os:timestamp(),
    {async, Fldr} = R,
    QR0 = Fldr(),
    io:format("Query returned ~w results in ~w microseconds, " ++
                    "expected ~w~n",
                [length(QR0),
                    timer:now_diff(os:timestamp(), SW),
                    length(IdxR)]),
    QR = lists:sort(QR0),
    ER = lists:sort(IdxR),

    ok = if
                ER == QR ->
                    ok
            end,
    ok.
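
%% The range {"idx1_bin", "0", "|"} covers all generated index terms:
%% every term starts with a digit year from get_randomdate/0, and digits
%% and letters both sort between "0" and "|" in ASCII.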

put_indexed_objects(Book, Bucket, Count) ->
    V = testutil:get_compressiblevalue(),
    IndexGen = testutil:get_randomindexes_generator(1),
    SW = os:timestamp(),
    ObjL1 = testutil:generate_objects(Count,
                                        uuid,
                                        [],
                                        V,
                                        IndexGen,
                                        Bucket),
    KSpecL = lists:map(fun({_RN, Obj, Spc}) ->
                            book_riakput(Book, Obj, Spc),
                            {testutil:get_key(Obj), Spc}
                            end,
                        ObjL1),
    io:format("Put of ~w objects with ~w index entries "
                    ++
                    "each completed in ~w microseconds~n",
                [Count, 1, timer:now_diff(os:timestamp(), SW)]),
    {KSpecL, V}.

put_altered_indexed_objects(Book, Bucket, KSpecL) ->
    put_altered_indexed_objects(Book, Bucket, KSpecL, true).

put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i) ->
    IndexGen = testutil:get_randomindexes_generator(1),
    V = testutil:get_compressiblevalue(),
    RplKSpecL = lists:map(fun({K, Spc}) ->
                                AddSpc = if
                                                RemoveOld2i == true ->
                                                    [lists:keyfind(add, 1, Spc)];
                                                RemoveOld2i == false ->
                                                    []
                                            end,
                                {O, AltSpc} = testutil:set_object(Bucket,
                                                                    K,
                                                                    V,
                                                                    IndexGen,
                                                                    AddSpc),
                                case book_riakput(Book, O, AltSpc) of
                                    ok -> ok;
                                    pause -> timer:sleep(?SLOWOFFER_DELAY)
                                end,
                                {K, AltSpc} end,
                            KSpecL),
    {RplKSpecL, V}.

rotating_object_check(RootPath, B, NumberOfObjects) ->
    BookOpts = [{root_path, RootPath},
                {cache_size, 1000},
                {max_journalsize, 5000000},
                {sync_strategy, sync_strategy()}],
    {ok, Book1} = leveled_bookie:book_start(BookOpts),
    {KSpcL1, V1} = testutil:put_indexed_objects(Book1, B, NumberOfObjects),
    ok = testutil:check_indexed_objects(Book1, B, KSpcL1, V1),
    {KSpcL2, V2} = testutil:put_altered_indexed_objects(Book1, B, KSpcL1),
    ok = testutil:check_indexed_objects(Book1, B, KSpcL2, V2),
    {KSpcL3, V3} = testutil:put_altered_indexed_objects(Book1, B, KSpcL2),
    ok = leveled_bookie:book_close(Book1),
    {ok, Book2} = leveled_bookie:book_start(BookOpts),
    ok = testutil:check_indexed_objects(Book2, B, KSpcL3, V3),
    {KSpcL4, V4} = testutil:put_altered_indexed_objects(Book2, B, KSpcL3),
    ok = testutil:check_indexed_objects(Book2, B, KSpcL4, V4),
    Query = {keylist, ?RIAK_TAG, B, {fun foldkeysfun/3, []}},
    {async, BList} = leveled_bookie:book_returnfolder(Book2, Query),
    true = NumberOfObjects == length(BList()),
    ok = leveled_bookie:book_close(Book2),
    ok.

corrupt_journal(RootPath, FileName, Corruptions, BasePosition, GapSize) ->
    OriginalPath = RootPath ++ "/journal/journal_files/" ++ FileName,
    BackupPath = RootPath ++ "/journal/journal_files/" ++
                    filename:basename(FileName, ".cdb") ++ ".bak",
    io:format("Corruption attempt to be made to filename ~s ~w ~w~n",
                [FileName,
                    filelib:is_file(OriginalPath),
                    filelib:is_file(BackupPath)]),
    {ok, _BytesCopied} = file:copy(OriginalPath, BackupPath),
    {ok, Handle} = file:open(OriginalPath, [binary, raw, read, write]),
    lists:foreach(fun(X) ->
                        Position = X * GapSize + BasePosition,
                        ok = file:pwrite(Handle, Position, <<0:8/integer>>)
                        end,
                    lists:seq(1, Corruptions)),
    ok = file:close(Handle).

restore_file(RootPath, FileName) ->
    OriginalPath = RootPath ++ "/journal/journal_files/" ++ FileName,
    BackupPath = RootPath ++ "/journal/journal_files/" ++
                    filename:basename(FileName, ".cdb") ++ ".bak",
    file:copy(BackupPath, OriginalPath).

restore_topending(RootPath, FileName) ->
    OriginalPath = RootPath ++ "/journal/journal_files/" ++ FileName,
    PndPath = RootPath ++ "/journal/journal_files/" ++
                    filename:basename(FileName, ".cdb") ++ ".pnd",
    ok = file:rename(OriginalPath, PndPath),
    false = filelib:is_file(OriginalPath).

find_journals(RootPath) ->
    {ok, FNsA_J} = file:list_dir(RootPath ++ "/journal/journal_files"),
    % The dot must be escaped (and the backslash doubled within the
    % string) so that only genuine .cdb extensions match
    {ok, Regex} = re:compile(".*\\.cdb"),
    CDBFiles = lists:foldl(fun(FN, Acc) -> case re:run(FN, Regex) of
                                                nomatch ->
                                                    Acc;
                                                _ ->
                                                    [FN|Acc]
                                            end
                                            end,
                            [],
                            FNsA_J),
    CDBFiles.

convert_to_seconds({MegaSec, Seconds, _MicroSec}) ->
    MegaSec * 1000000 + Seconds.