2016-10-18 01:59:03 +01:00
|
|
|
-module(testutil).
|
|
|
|
|
|
|
|
-include("../include/leveled.hrl").
|
|
|
|
|
2024-09-06 11:18:24 +01:00
|
|
|
-export([init_per_suite/1, end_per_suite/1]).
|
|
|
|
|
2016-11-07 10:11:57 +00:00
|
|
|
-export([book_riakput/3,
|
2024-01-22 21:22:54 +00:00
|
|
|
book_tempriakput/4,
|
2016-11-07 10:11:57 +00:00
|
|
|
book_riakdelete/4,
|
|
|
|
book_riakget/3,
|
|
|
|
book_riakhead/3,
|
2016-11-07 10:27:38 +00:00
|
|
|
riakload/2,
|
2017-11-20 10:21:30 +00:00
|
|
|
stdload/2,
|
2018-12-05 15:18:20 +00:00
|
|
|
stdload_expiring/3,
|
|
|
|
stdload_object/6,
|
2020-03-16 12:51:14 +00:00
|
|
|
stdload_object/9,
|
2016-11-07 10:11:57 +00:00
|
|
|
reset_filestructure/0,
|
2016-10-26 20:39:16 +01:00
|
|
|
reset_filestructure/1,
|
2016-10-18 01:59:03 +01:00
|
|
|
check_bucket_stats/2,
|
2017-12-04 15:26:01 +00:00
|
|
|
checkhead_forlist/2,
|
2016-10-18 01:59:03 +01:00
|
|
|
check_forlist/2,
|
|
|
|
check_forlist/3,
|
|
|
|
check_formissinglist/2,
|
|
|
|
check_forobject/2,
|
|
|
|
check_formissingobject/3,
|
|
|
|
generate_testobject/0,
|
|
|
|
generate_testobject/5,
|
|
|
|
generate_compressibleobjects/2,
|
|
|
|
generate_smallobjects/2,
|
|
|
|
generate_objects/2,
|
|
|
|
generate_objects/5,
|
2016-10-20 12:16:17 +01:00
|
|
|
generate_objects/6,
|
|
|
|
set_object/5,
|
2018-11-05 01:21:08 +00:00
|
|
|
get_bucket/1,
|
2016-10-20 12:16:17 +01:00
|
|
|
get_key/1,
|
|
|
|
get_value/1,
|
2016-11-28 22:26:09 +00:00
|
|
|
get_vclock/1,
|
2018-10-31 18:34:27 +00:00
|
|
|
get_lastmodified/1,
|
2016-10-18 01:59:03 +01:00
|
|
|
get_compressiblevalue/0,
|
2016-11-04 14:23:37 +00:00
|
|
|
get_compressiblevalue_andinteger/0,
|
2016-10-18 01:59:03 +01:00
|
|
|
get_randomindexes_generator/1,
|
2018-11-05 10:31:15 +00:00
|
|
|
get_aae_segment/1,
|
|
|
|
get_aae_segment/2,
|
2016-10-18 01:59:03 +01:00
|
|
|
name_list/0,
|
2016-10-27 00:57:19 +01:00
|
|
|
load_objects/5,
|
2017-06-19 11:36:57 +01:00
|
|
|
load_objects/6,
|
2017-11-17 18:30:51 +00:00
|
|
|
update_some_objects/3,
|
|
|
|
delete_some_objects/3,
|
2016-10-27 00:57:19 +01:00
|
|
|
put_indexed_objects/3,
|
2023-12-19 11:56:03 +00:00
|
|
|
put_indexed_objects/4,
|
2016-10-27 00:57:19 +01:00
|
|
|
put_altered_indexed_objects/3,
|
|
|
|
put_altered_indexed_objects/4,
|
2023-01-18 11:44:02 +00:00
|
|
|
put_altered_indexed_objects/5,
|
2016-10-27 00:57:19 +01:00
|
|
|
check_indexed_objects/4,
|
2016-11-01 00:46:14 +00:00
|
|
|
rotating_object_check/3,
|
2023-12-19 11:56:03 +00:00
|
|
|
rotation_withnocheck/6,
|
2016-11-03 12:11:50 +00:00
|
|
|
corrupt_journal/5,
|
|
|
|
restore_file/2,
|
|
|
|
restore_topending/2,
|
2016-11-04 11:01:37 +00:00
|
|
|
find_journals/1,
|
2016-11-18 11:53:14 +00:00
|
|
|
wait_for_compaction/1,
|
2016-11-25 17:41:08 +00:00
|
|
|
foldkeysfun/3,
|
2017-07-03 18:03:13 +01:00
|
|
|
foldkeysfun_returnbucket/3,
|
2018-04-16 17:19:20 +01:00
|
|
|
sync_strategy/0,
|
2018-09-17 10:09:28 +01:00
|
|
|
riak_object/4,
|
2018-09-25 18:32:48 +01:00
|
|
|
get_value_from_objectlistitem/1,
|
2018-10-31 16:35:53 +00:00
|
|
|
numbered_key/1,
|
|
|
|
fixed_bin_key/1,
|
2020-03-16 12:51:14 +00:00
|
|
|
convert_to_seconds/1,
|
|
|
|
compact_and_wait/1]).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
2016-10-27 00:57:19 +01:00
|
|
|
-define(RETURN_TERMS, {true, undefined}).
|
2024-07-15 20:49:21 +01:00
|
|
|
-define(SLOWOFFER_DELAY, 40).
|
2016-11-28 22:26:09 +00:00
|
|
|
-define(V1_VERS, 1).
|
|
|
|
-define(MAGIC, 53). % riak_kv -> riak_object
|
|
|
|
-define(MD_VTAG, <<"X-Riak-VTag">>).
|
|
|
|
-define(MD_LASTMOD, <<"X-Riak-Last-Modified">>).
|
|
|
|
-define(MD_DELETED, <<"X-Riak-Deleted">>).
|
2020-03-15 22:14:42 +00:00
|
|
|
-define(MD_INDEX, <<"index">>).
|
2016-11-28 22:26:09 +00:00
|
|
|
-define(EMPTY_VTAG_BIN, <<"e">>).
|
2017-06-19 11:36:57 +01:00
|
|
|
-define(ROOT_PATH, "test").
|
2016-11-28 22:26:09 +00:00
|
|
|
|
2018-12-11 20:42:00 +00:00
|
|
|
%% Minimal local copies of riak_kv's r_content / r_object records, used
%% only to build riak-format test objects for the suites.
-record(r_content, {
            metadata,          % dict of metadata KV pairs (see meta_bin/1)
            value :: term()    % stored value (binary or arbitrary term)
            }).

-record(r_object, {
            bucket,
            key,
            contents :: [#r_content{}],   % sibling list (normally one entry)
            vclock,                        % list of {Actor, Counter} pairs
            % fields below mirror riak_object but are unused by these tests
            updatemetadata=dict:store(clean, true, dict:new()),
            updatevalue :: term()}).
|
2018-09-17 10:09:28 +01:00
|
|
|
|
2024-09-06 11:18:24 +01:00
|
|
|
|
|
|
|
%% Common Test hook: add a per-suite file handler routing leveled backend
%% log events to "leveled_<Suite>_ct.log", and quieten the default and ct
%% handlers to notice for the duration of the suite.
init_per_suite(Config) ->
    % Plain-text line template: "<time> log_level=<level> <msg>\n"
    LogTemplate = [time, " log_level=", level, " ", msg, "\n"],
    LogFormatter =
        {
            logger_formatter,
            #{
                % separate date and time with a space rather than default $T
                time_designator => $\s,
                template => LogTemplate
            }
        },
    {suite, SUITEName} = lists:keyfind(suite, 1, Config),
    FileName = "leveled_" ++ SUITEName ++ "_ct.log",
    LogConfig =
        #{
            config =>
                #{
                    file => FileName,
                    max_no_files => 5
                }
        },

    % Filter passes only events whose metadata log_type equals the filter
    % argument (installed below with LogType = backend); others are dropped.
    LogFilter =
        fun(LogEvent, LogType) ->
            Meta = maps:get(meta, LogEvent),
            case maps:get(log_type, Meta, not_found) of
                LogType ->
                    LogEvent;
                _ ->
                    ignore
            end
        end,

    ok = logger:add_handler(logfile, logger_std_h, LogConfig),
    ok = logger:set_handler_config(logfile, formatter, LogFormatter),
    ok = logger:set_handler_config(logfile, level, info),
    ok = logger:add_handler_filter(logfile, type_filter, {LogFilter, backend}),

    % Console/ct stay at notice while the file handler captures info
    ok = logger:set_handler_config(default, level, notice),
    ok = logger:set_handler_config(cth_log_redirect, level, notice),

    ok = logger:set_primary_config(level, info),

    Config.
|
|
|
|
|
|
|
|
%% Common Test hook: remove the suite log handler added by init_per_suite/1
%% and restore the default logger configuration.
end_per_suite(_Config) ->
    ok = logger:remove_handler(logfile),
    ok = logger:set_primary_config(level, notice),
    RestoreFun =
        fun(HandlerId) ->
            ok = logger:set_handler_config(HandlerId, level, all)
        end,
    lists:foreach(RestoreFun, [default, cth_log_redirect]),

    ok.
|
|
|
|
|
2018-09-17 10:09:28 +01:00
|
|
|
%% Build a riak-style object binary (v1 wire format) from the given bucket,
%% key, value and metadata proplist, with a freshly generated random vclock.
riak_object(Bucket, Key, Value, MetaData) ->
    Contents =
        [#r_content{metadata = dict:from_list(MetaData), value = Value}],
    to_binary(
        v1,
        #r_object{
            bucket = Bucket,
            key = Key,
            contents = Contents,
            vclock = generate_vclock()
        }
    ).
|
|
|
|
|
2016-11-28 22:26:09 +00:00
|
|
|
%% =================================================
|
|
|
|
%% From riak_object
|
|
|
|
|
|
|
|
%% Serialise an #r_object{} to the riak v1 binary format (vclock + siblings).
to_binary(v1, Obj) ->
    #r_object{contents = Contents, vclock = VClock} = Obj,
    new_v1(VClock, Contents).
|
|
|
|
|
|
|
|
%% Encode a vclock and sibling list as a riak v1 object binary:
%% magic byte, version byte, vclock length + payload, sibling count,
%% then the concatenated sibling encodings.
new_v1(Vclock, Siblings) ->
    VclockBin = term_to_binary(Vclock),
    SibsBin = bin_contents(Siblings),
    <<?MAGIC:8/integer, ?V1_VERS:8/integer,
        (byte_size(VclockBin)):32/integer, VclockBin/binary,
        (length(Siblings)):32/integer, SibsBin/binary>>.
|
|
|
|
|
|
|
|
%% Encode one sibling as a length-prefixed value followed by
%% length-prefixed metadata.
bin_content(#r_content{metadata = Meta, value = Val}) ->
    ValBin = encode_maybe_binary(Val),
    MetaBin = meta_bin(Meta),
    <<(byte_size(ValBin)):32/integer, ValBin/binary,
        (byte_size(MetaBin)):32/integer, MetaBin/binary>>.
|
|
|
|
|
|
|
|
%% Concatenate the binary encodings of all siblings into one binary.
bin_contents(Contents) ->
    iolist_to_binary([bin_content(Content) || Content <- Contents]).
|
|
|
|
|
|
|
|
%% Encode a metadata dict to the riak v1 sibling metadata binary:
%% last-modified (3 x 32-bit parts), vtag (8-bit length + bytes), the
%% deleted flag (one byte), then all remaining user metadata encoded as
%% length-prefixed KV pairs by fold_meta_to_bin/3.
meta_bin(MD) ->
    % The fold extracts the three specially-encoded fields into the state
    % tuple; every other key is appended to RestBin.
    {{VTagVal, Deleted, LastModVal}, RestBin} =
        dict:fold(fun fold_meta_to_bin/3,
                    {{undefined, <<0>>, undefined}, <<>>},
                    MD),
    % Missing vtag is encoded as the single byte <<"e">>
    VTagBin = case VTagVal of
                    undefined -> ?EMPTY_VTAG_BIN;
                    _ -> list_to_binary(VTagVal)
                end,
    VTagLen = byte_size(VTagBin),
    % Missing last-modified is encoded as three zero words
    LastModBin = case LastModVal of
                        undefined ->
                            <<0:32/integer, 0:32/integer, 0:32/integer>>;
                        {Mega,Secs,Micro} ->
                            <<Mega:32/integer, Secs:32/integer, Micro:32/integer>>
                    end,
    <<LastModBin/binary, VTagLen:8/integer, VTagBin:VTagLen/binary,
        Deleted:1/binary-unit:8, RestBin/binary>>.
|
|
|
|
|
|
|
|
%% dict:fold/3 worker for meta_bin/1.  The three riak-special keys (vtag,
%% last-modified, deleted) update the state tuple; any other key/value is
%% appended to RestBin as length-prefixed pairs.
fold_meta_to_bin(?MD_VTAG, Value, {{_Vt,Del,Lm},RestBin}) ->
    {{Value, Del, Lm}, RestBin};
fold_meta_to_bin(?MD_LASTMOD, Value, {{Vt,Del,_Lm},RestBin}) ->
    {{Vt, Del, Value}, RestBin};
fold_meta_to_bin(?MD_DELETED, true, {{Vt,_Del,Lm},RestBin})->
    {{Vt, <<1>>, Lm}, RestBin};
% the string "true" is normalised to the atom form above
fold_meta_to_bin(?MD_DELETED, "true", Acc) ->
    fold_meta_to_bin(?MD_DELETED, true, Acc);
% any other deleted value encodes as not-deleted
fold_meta_to_bin(?MD_DELETED, _, {{Vt,_Del,Lm},RestBin}) ->
    {{Vt, <<0>>, Lm}, RestBin};
fold_meta_to_bin(Key, Value, {{_Vt,_Del,_Lm}=Elems,RestBin}) ->
    ValueBin = encode_maybe_binary(Value),
    ValueLen = byte_size(ValueBin),
    KeyBin = encode_maybe_binary(Key),
    KeyLen = byte_size(KeyBin),
    MetaBin = <<KeyLen:32/integer, KeyBin/binary,
                ValueLen:32/integer, ValueBin/binary>>,
    {Elems, <<RestBin/binary, MetaBin/binary>>}.
|
|
|
|
|
|
|
|
%% Tag a value for storage: binaries are prefixed with byte 1 and kept
%% as-is; any other term is prefixed with byte 0 and externalised with
%% term_to_binary/1.
encode_maybe_binary(Bin) when is_binary(Bin) ->
    <<1, Bin/binary>>;
encode_maybe_binary(Term) ->
    TermBin = term_to_binary(Term),
    <<0, TermBin/binary>>.
|
|
|
|
|
|
|
|
%% =================================================
|
2016-11-07 10:11:57 +00:00
|
|
|
|
2016-11-25 17:41:08 +00:00
|
|
|
%% Sync strategy used by the suites when opening a bookie; none means
%% writes are not fsynced.
sync_strategy() ->
    none.
|
2016-11-25 17:41:08 +00:00
|
|
|
|
2016-11-07 10:11:57 +00:00
|
|
|
%% PUT a riak object (with its index specs) via the bookie under the
%% RIAK tag; the object is serialised to the v1 binary format first.
book_riakput(Pid, RiakObject, IndexSpecs) ->
    #r_object{bucket = Bucket, key = Key} = RiakObject,
    leveled_bookie:book_put(
        Pid,
        Bucket,
        Key,
        to_binary(v1, RiakObject),
        IndexSpecs,
        ?RIAK_TAG
    ).
|
2016-11-07 10:11:57 +00:00
|
|
|
|
2024-01-22 21:22:54 +00:00
|
|
|
%% PUT a riak object with a time-to-live via the bookie under the RIAK tag.
book_tempriakput(Pid, RiakObject, IndexSpecs, TTL) ->
    #r_object{bucket = Bucket, key = Key} = RiakObject,
    leveled_bookie:book_tempput(
        Pid,
        Bucket,
        Key,
        to_binary(v1, RiakObject),
        IndexSpecs,
        ?RIAK_TAG,
        TTL
    ).
|
2024-01-22 21:22:54 +00:00
|
|
|
|
2016-11-07 10:11:57 +00:00
|
|
|
%% Delete by putting the reserved value 'delete' under the RIAK tag.
book_riakdelete(Pid, Bucket, Key, IndexSpecs) ->
    leveled_bookie:book_put(Pid, Bucket, Key, delete, IndexSpecs, ?RIAK_TAG).
|
2016-11-07 10:11:57 +00:00
|
|
|
|
|
|
|
%% GET under the RIAK tag; returns {ok, Binary} | not_found.
book_riakget(Pid, Bucket, Key) ->
    leveled_bookie:book_get(Pid, Bucket, Key, ?RIAK_TAG).
|
2016-11-07 10:11:57 +00:00
|
|
|
|
|
|
|
%% HEAD (metadata-only fetch) under the RIAK tag.
book_riakhead(Pid, Bucket, Key) ->
    leveled_bookie:book_head(Pid, Bucket, Key, ?RIAK_TAG).
|
2016-11-07 10:11:57 +00:00
|
|
|
|
|
|
|
|
2016-11-07 10:27:38 +00:00
|
|
|
%% Load a list of {_RN, Object, IndexSpecs} tuples into the bookie,
%% backing off briefly whenever the bookie signals 'pause'.
riakload(Bookie, ObjectList) ->
    PutFun =
        fun({_RN, Obj, Spc}) ->
            case book_riakput(Bookie, Obj, Spc) of
                ok ->
                    ok;
                pause ->
                    timer:sleep(?SLOWOFFER_DELAY)
            end
        end,
    lists:foreach(PutFun, ObjectList).
|
2016-11-07 10:11:57 +00:00
|
|
|
|
2017-11-20 10:21:30 +00:00
|
|
|
%% Load Count objects under ?STD_TAG into bucket "Bucket" with UUID keys,
%% returning [{Bucket, Key, ValueHash}] for later verification.
stdload(Bookie, Count) ->
    stdload(Bookie, Count, []).

stdload(_Bookie, 0, Acc) ->
    Acc;
stdload(Bookie, Count, Acc) ->
    Bucket = "Bucket",
    Key = leveled_util:generate_uuid(),
    Value = get_compressiblevalue(),
    case leveled_bookie:book_put(Bookie, Bucket, Key, Value, [], ?STD_TAG) of
        ok ->
            ok;
        pause ->
            % bookie asked for back-pressure
            timer:sleep(?SLOWOFFER_DELAY)
    end,
    stdload(Bookie, Count - 1, [{Bucket, Key, erlang:phash2(Value)}|Acc]).
|
|
|
|
|
2018-12-05 15:18:20 +00:00
|
|
|
%% Add KeyCount objects that expire When seconds in the future; each
%% object carries a single entry on the <<"temp_int">> index.  Returns a
%% sorted list of {IndexValue, Bucket, Key}.
stdload_expiring(Book, KeyCount, When) ->
    ExpiryTime = leveled_util:integer_now() + When,
    Value = get_compressiblevalue(),
    stdload_expiring(Book, KeyCount, ExpiryTime, Value, []).

stdload_expiring(_Book, 0, _TTL, _V, Acc) ->
    lists:sort(Acc);
stdload_expiring(Book, KeyCount, TTL, V, Acc) ->
    Bucket = <<"Bucket">>,
    Key = list_to_binary(leveled_util:generate_uuid()),
    % index value cycles through 0..999
    IdxValue = KeyCount rem 1000,
    stdload_object(Book, Bucket, Key, IdxValue, V, TTL),
    stdload_expiring(Book, KeyCount - 1, TTL, V, [{IdxValue, Bucket, Key}|Acc]).
|
|
|
|
|
|
|
|
%% Default stdload_object: STD_TAG, remove any previous temp_int index
%% entries, and do not require the object to already exist.
stdload_object(Book, B, K, I, V, TTL) ->
    stdload_object(Book, B, K, I, V, TTL, ?STD_TAG, true, false).
|
|
|
|
|
|
|
|
%% Put an object with a temp_int index entry I, reconciling with any
%% previously stored version:
%% - if a previous object exists and RemovePrev2i, remove its old index
%%   entries and replace the value;
%% - if it exists and not RemovePrev2i, accumulate I onto its index list;
%% - if absent, MustFind must be false (a miss with MustFind=true crashes
%%   via case_clause, which is the intended assertion).
%% TTL is 'infinity' for a plain put or an integer expiry for tempput.
stdload_object(Book, B, K, I, V, TTL, Tag, RemovePrev2i, MustFind) ->
    Obj = [{index, [I]}, {value, V}],
    {IdxSpecs, Obj0} =
        case {leveled_bookie:book_get(Book, B, K, Tag), MustFind} of
            {{ok, PrevObj}, _} ->
                {index, PrevIs} = lists:keyfind(index, 1, PrevObj),
                case RemovePrev2i of
                    true ->
                        MapFun =
                            fun(OldI) -> {remove, <<"temp_int">>, OldI} end,
                        {[{add, <<"temp_int">>, I}|lists:map(MapFun, PrevIs)],
                            Obj};
                    false ->
                        {[{add, <<"temp_int">>, I}],
                            [{index, [I|PrevIs]}, {value, V}]}
                end;
            {not_found, false} ->
                {[{add, <<"temp_int">>, I}], Obj}
        end,
    R =
        case TTL of
            infinity ->
                leveled_bookie:book_put(Book, B, K, Obj0, IdxSpecs, Tag);
            TTL when is_integer(TTL) ->
                leveled_bookie:book_tempput(Book, B, K, Obj0,
                                            IdxSpecs, Tag, TTL)
        end,
    % honour back-pressure from the bookie
    case R of
        ok ->
            ok;
        pause ->
            io:format("Slow offer needed~n"),
            timer:sleep(?SLOWOFFER_DELAY)
    end.
|
|
|
|
|
|
|
|
|
2016-11-07 10:11:57 +00:00
|
|
|
|
2020-03-16 12:51:14 +00:00
|
|
|
|
2016-10-18 01:59:03 +01:00
|
|
|
%% Clear the test journal/ledger directories, optionally waiting first to
%% allow in-flight file closes to complete.  Accepts either a wait in ms
%% or an alternative root path; returns the root path used.
reset_filestructure() ->
    reset_filestructure(0, ?ROOT_PATH).

reset_filestructure(Wait) when is_integer(Wait) ->
    reset_filestructure(Wait, ?ROOT_PATH);
reset_filestructure(RootPath) when is_list(RootPath) ->
    reset_filestructure(0, RootPath).

reset_filestructure(Wait, RootPath) ->
    io:format(
        "Waiting ~w ms to give a chance for all file closes "
        "to complete~n",
        [Wait]
    ),
    timer:sleep(Wait),
    JournalPath = RootPath ++ "/journal",
    LedgerPath = RootPath ++ "/ledger",
    filelib:ensure_dir(JournalPath ++ "/"),
    filelib:ensure_dir(LedgerPath ++ "/"),
    leveled_inker:clean_testdir(JournalPath),
    leveled_penciller:clean_testdir(LedgerPath),
    RootPath.
|
|
|
|
|
2016-11-14 11:17:14 +00:00
|
|
|
%% Poll (up to 15 times, 5s apart) until the bookie reports no pending
%% journal compaction.  Returns the final pending state: false when
%% compaction completed, true if still pending after all attempts.
wait_for_compaction(Bookie) ->
    F = fun leveled_bookie:book_islastcompactionpending/1,
    % fold carries the pending flag; once false, remaining iterations
    % short-circuit without sleeping
    lists:foldl(fun(X, Pending) ->
                        case Pending of
                            false ->
                                false;
                            true ->
                                io:format(
                                    "Loop ~w waiting for journal "
                                    "compaction to complete~n",
                                    [X]
                                ),
                                timer:sleep(5000),
                                F(Bookie)
                        end end,
                    true,
                    lists:seq(1, 15)).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% Run a riakbucket_stats fold for Bucket, log the timing, and return
%% {TotalSize, ObjectCount}.
check_bucket_stats(Bookie, Bucket) ->
    FoldStart = os:timestamp(),
    io:format("Checking bucket size~n"),
    {async, StatsFolder} =
        leveled_bookie:book_returnfolder(Bookie, {riakbucket_stats, Bucket}),
    {BucketSize, BucketCount} = StatsFolder(),
    io:format("Bucket fold completed in ~w microseconds~n",
                [timer:now_diff(os:timestamp(), FoldStart)]),
    io:format("Bucket ~s has size ~w and count ~w~n",
                [Bucket, BucketSize, BucketCount]),
    {BucketSize, BucketCount}.
|
|
|
|
|
|
|
|
|
|
|
|
%% Fetch every object in ChkList from the bookie and assert the stored
%% binary matches the expected v1 encoding; crashes (badmatch on 'error')
%% on any miss or mismatch.  Log=true prints each key fetched.
check_forlist(Bookie, ChkList) ->
    check_forlist(Bookie, ChkList, false).

check_forlist(Bookie, ChkList, Log) ->
    SW = os:timestamp(),
    CheckFun =
        fun({_RN, Obj, _Spc}) ->
            case Log of
                true ->
                    io:format("Fetching Key ~s~n", [Obj#r_object.key]);
                _ ->
                    ok
            end,
            R = book_riakget(Bookie, Obj#r_object.bucket, Obj#r_object.key),
            true =
                case R of
                    {ok, Val} ->
                        to_binary(v1, Obj) == Val;
                    not_found ->
                        io:format("Object not found for key ~s~n",
                                    [Obj#r_object.key]),
                        error
                end
        end,
    lists:foreach(CheckFun, ChkList),
    io:format(
        "Fetch check took ~w microseconds checking list of length ~w~n",
        [timer:now_diff(os:timestamp(), SW), length(ChkList)]
    ).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
2017-12-04 15:26:01 +00:00
|
|
|
%% HEAD every object in ChkList, asserting each is present; crashes
%% (badmatch on 'error') on any miss.
checkhead_forlist(Bookie, ChkList) ->
    SW = os:timestamp(),
    HeadFun =
        fun({_RN, Obj, _Spc}) ->
            R = book_riakhead(Bookie, Obj#r_object.bucket, Obj#r_object.key),
            true =
                case R of
                    {ok, _Head} ->
                        true;
                    not_found ->
                        io:format("Object not found for key ~s~n",
                                    [Obj#r_object.key]),
                        error
                end
        end,
    lists:foreach(HeadFun, ChkList),
    io:format("Head check took ~w microseconds checking list of length ~w~n",
                [timer:now_diff(os:timestamp(), SW), length(ChkList)]).
|
|
|
|
|
2016-10-18 01:59:03 +01:00
|
|
|
%% Assert that none of the objects in ChkList are present in the bookie.
check_formissinglist(Bookie, ChkList) ->
    SW = os:timestamp(),
    MissFun =
        fun({_RN, Obj, _Spc}) ->
            not_found =
                book_riakget(Bookie, Obj#r_object.bucket, Obj#r_object.key)
        end,
    lists:foreach(MissFun, ChkList),
    io:format("Miss check took ~w microseconds checking list of length ~w~n",
                [timer:now_diff(os:timestamp(), SW), length(ChkList)]).
|
|
|
|
|
|
|
|
%% Assert TestObject is retrievable by both GET and HEAD, and that the
%% vclock held in the HEAD metadata matches the object's vclock.
check_forobject(Bookie, TestObject) ->
    TestBinary = to_binary(v1, TestObject),
    {ok, TestBinary} = book_riakget(Bookie,
                                    TestObject#r_object.bucket,
                                    TestObject#r_object.key),
    {ok, HeadBinary} = book_riakhead(Bookie,
                                        TestObject#r_object.bucket,
                                        TestObject#r_object.key),
    % NOTE: the lowercase 'size' in the pattern is the literal atom passed
    % in as the size argument, echoed back in the metadata tuple - it is
    % not an unbound variable.
    {{_SibMetaBin, Vclock, _Hash, size}, _LMS}
        = leveled_head:riak_extract_metadata(HeadBinary, size),
    true = binary_to_term(Vclock) == TestObject#r_object.vclock.
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% Assert that neither GET nor HEAD can find {Bucket, Key}.
check_formissingobject(Bookie, Bucket, Key) ->
    not_found = book_riakget(Bookie, Bucket, Key),
    not_found = book_riakhead(Bookie, Bucket, Key).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
|
|
|
|
%% Build a default test object ({Object, IndexSpecs}) with fixed bucket,
%% key, value and metadata.
generate_testobject() ->
    generate_testobject(
        <<"Bucket1">>,
        <<"Key1">>,
        <<"Value1">>,
        [],
        [{<<"MDK1">>, <<"MDV1">>}]
    ).

%% Build a test object from the given parts, stamping a last-modified
%% date into the metadata and generating a fresh random vclock.
generate_testobject(B, K, V, Spec, MD) ->
    MD0 = [{?MD_LASTMOD, os:timestamp()}|MD],
    Contents = [#r_content{metadata = dict:from_list(MD0), value = V}],
    Object =
        #r_object{
            bucket = B,
            key = K,
            contents = Contents,
            vclock = generate_vclock()
        },
    {Object, Spec}.
|
|
|
|
|
|
|
|
|
|
|
|
%% Generate Count objects all sharing one compressible value.
generate_compressibleobjects(Count, KeyNumber) ->
    SharedValue = get_compressiblevalue(),
    generate_objects(Count, KeyNumber, [], SharedValue).
|
|
|
|
|
|
|
|
|
2016-11-04 14:23:37 +00:00
|
|
|
%% Pair a random integer (1..1000) with a compressible value.
get_compressiblevalue_andinteger() ->
    RandomInt = rand:uniform(1000),
    {RandomInt, get_compressiblevalue()}.
|
2016-11-04 14:23:37 +00:00
|
|
|
|
2016-10-18 01:59:03 +01:00
|
|
|
%% Produce a 15360-byte binary built from 1024 randomly chosen 15-char
%% repeated-pattern strings - designed to compress very well.
get_compressiblevalue() ->
    Patterns =
        {"111111111111111", "222222222222222", "333333333333333",
            "aaaaaaaaaaaaaaa", "AAAAAAAAAAAAAAA", "GGGGGGGGGGGGGGG",
            "===============", "..............."},
    PickFun =
        fun(_N, Acc) ->
            [element(rand:uniform(8), Patterns)|Acc]
        end,
    iolist_to_binary(lists:foldl(PickFun, [], lists:seq(1, 1024))).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% Generate Count objects sharing one 512-byte random value.
generate_smallobjects(Count, KeyNumber) ->
    SmallValue = crypto:strong_rand_bytes(512),
    generate_objects(Count, KeyNumber, [], SmallValue).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% Generate Count objects sharing one 4KB random value; defaults cascade
%% to an empty index generator and the <<"Bucket">> bucket.
generate_objects(Count, KeyNumber) ->
    LargeValue = crypto:strong_rand_bytes(4096),
    generate_objects(Count, KeyNumber, [], LargeValue).

generate_objects(Count, KeyNumber, ObjL, Value) ->
    NoIndexes = fun() -> [] end,
    generate_objects(Count, KeyNumber, ObjL, Value, NoIndexes).

generate_objects(Count, KeyNumber, ObjL, Value, IndexGen) ->
    generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, <<"Bucket">>).
|
2016-10-20 12:16:17 +01:00
|
|
|
|
|
|
|
%% Core object generator.  KeyNumber selects the key scheme:
%%   binary_uuid          - binary UUID keys
%%   uuid                 - list (string) UUID keys
%%   {binary, N}          - binary "KeyN" keys, incrementing N
%%   {fixed_binary, N}    - fixed-width 64-bit binary keys, incrementing N
%%   N (integer)          - list "KeyN" keys, incrementing N
%% Each generated item is {rand:uniform(), Object, IndexSpecs}; the random
%% first element gives callers a shuffle/sort handle.  Returns the list in
%% generation order (accumulator is reversed at the end).
generate_objects(0, _KeyNumber, ObjL, _Value, _IndexGen, _Bucket) ->
    lists:reverse(ObjL);
% list buckets are normalised to binary for the binary_uuid scheme
generate_objects(
    Count, binary_uuid, ObjL, Value, IndexGen, Bucket)
        when is_list(Bucket) ->
    generate_objects(
        Count, binary_uuid, ObjL, Value, IndexGen, list_to_binary(Bucket)
    );
generate_objects(
    Count, binary_uuid, ObjL, Value, IndexGen, Bucket)
        when is_binary(Bucket) ->
    {Obj1, Spec1} =
        set_object(
            Bucket,
            list_to_binary(leveled_util:generate_uuid()),
            Value,
            IndexGen
        ),
    generate_objects(Count - 1,
                        binary_uuid,
                        [{rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, uuid, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} = set_object(Bucket,
                                leveled_util:generate_uuid(),
                                Value,
                                IndexGen),
    generate_objects(Count - 1,
                        uuid,
                        [{rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
% list buckets are normalised to binary for the {binary, N} scheme
generate_objects(
    Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket)
        when is_list(Bucket) ->
    generate_objects(
        Count, {binary, KeyNumber}, ObjL, Value, IndexGen, list_to_binary(Bucket)
    );
generate_objects(
    Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket)
        when is_binary(Bucket) ->
    {Obj1, Spec1} =
        set_object(
            Bucket,
            list_to_binary(numbered_key(KeyNumber)),
            Value,
            IndexGen
        ),
    generate_objects(Count - 1,
                        {binary, KeyNumber + 1},
                        [{rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, {fixed_binary, KeyNumber}, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} =
        set_object(Bucket,
                    fixed_bin_key(KeyNumber),
                    Value,
                    IndexGen),
    generate_objects(Count - 1,
                        {fixed_binary, KeyNumber + 1},
                        [{rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket);
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, Bucket) ->
    {Obj1, Spec1} = set_object(Bucket,
                                numbered_key(KeyNumber),
                                Value,
                                IndexGen),
    generate_objects(Count - 1,
                        KeyNumber + 1,
                        [{rand:uniform(), Obj1, Spec1}|ObjL],
                        Value,
                        IndexGen,
                        Bucket).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
2018-04-16 17:19:20 +01:00
|
|
|
%% @doc generates a key, exported so tests can use it without copying
%% code
-spec numbered_key(integer()) -> list().
numbered_key(KeyNumber) when is_integer(KeyNumber) ->
    lists:concat(["Key", KeyNumber]).
|
|
|
|
|
|
|
|
%% @doc generates a key for `KeyNumber' of a fixed size (64bits),
%% again, exported for tests to generate the same keys as
%% generate_objects/N without peeking.
-spec fixed_bin_key(integer()) -> binary().
fixed_bin_key(KeyNumber) ->
    <<"Key", KeyNumber:64/integer>>.
|
|
|
|
|
2016-10-20 12:16:17 +01:00
|
|
|
%% Build an object and its delta index specs; defaults cascade to no
%% indexes to remove and no previously-added indexes to retain.
set_object(Bucket, Key, Value, IndexGen) ->
    set_object(Bucket, Key, Value, IndexGen, []).

set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
    set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, []).
|
|
|
|
|
|
|
|
%% Build an {#r_object{}, DeltaIndexSpecs} pair.  IndexGen supplies new
%% {add, Field, Value} specs; Indexes2Remove are converted to remove
%% specs; IndexesNotToRemove are retained.  The object metadata records
%% the full current index set under ?MD_INDEX plus two MDK fields and a
%% last-modified timestamp.
set_object(Bucket, Key, Value, IndexGen, Indexes2Remove, IndexesNotToRemove) ->
    IdxSpecs = IndexGen(),
    % the complete set of indexes now on the object (retained + new),
    % stripped of the 'add' tag for storage in the metadata
    Indexes =
        lists:map(
            fun({add, IdxF, IdxV}) -> {IdxF, IdxV} end,
            lists:flatten([IndexesNotToRemove, IdxSpecs])
        ),
    Obj = {Bucket,
            Key,
            Value,
            % delta specs to apply at PUT time: new adds plus removes
            lists:flatten(
                IdxSpecs,
                lists:map(
                    fun({add, IdxF, IdxV}) -> {remove, IdxF, IdxV} end,
                    Indexes2Remove
                )
            ),
            [{<<"MDK">>, iolist_to_binary([<<"MDV">>, Key])},
                {<<"MDK2">>, iolist_to_binary([<<"MDV">>, Key])},
                {?MD_LASTMOD, os:timestamp()},
                {?MD_INDEX, Indexes}]},
    {B1, K1, V1, DeltaSpecs, MD} = Obj,
    Content = #r_content{metadata=dict:from_list(MD), value=V1},
    {#r_object{bucket=B1,
                key=K1,
                contents=[Content],
                vclock=generate_vclock()},
        DeltaSpecs}.
|
2016-10-18 01:59:03 +01:00
|
|
|
|
2018-09-25 18:32:48 +01:00
|
|
|
%% Extract the stored value from a {Rand, Object, Specs} generated list
%% item; the object is expected to hold exactly one sibling.
get_value_from_objectlistitem({_Int, Obj, _Spc}) ->
    [#r_content{value = StoredValue}] = Obj#r_object.contents,
    StoredValue.
|
|
|
|
|
2017-11-17 18:30:51 +00:00
|
|
|
%% Re-put SampleSize objects from ObjList with fresh 512-byte random
%% values, bumped vclocks and updated last-modified dates, logging the
%% time taken.
update_some_objects(Bookie, ObjList, SampleSize) ->
    StartWatchA = os:timestamp(),
    ToUpdateList = lists:sublist(lists:sort(ObjList), SampleSize),
    UpdateFun =
        fun({R, Obj, Spec}) ->
            VC0 = update_vclock(Obj#r_object.vclock),
            [C] = Obj#r_object.contents,
            MD0 = dict:store(?MD_LASTMOD, os:timestamp(), C#r_content.metadata),
            C0 =
                C#r_content{
                    value = crypto:strong_rand_bytes(512),
                    metadata = MD0
                },
            {R, Obj#r_object{vclock = VC0, contents = [C0]}, Spec}
        end,
    riakload(Bookie, lists:map(UpdateFun, ToUpdateList)),
    Time = timer:now_diff(os:timestamp(), StartWatchA),
    io:format("~w objects updates in ~w seconds~n",
                [SampleSize, Time/1000000]).
|
|
|
|
|
|
|
|
%% Delete SampleSize objects from ObjList via the bookie, logging the
%% time taken.
delete_some_objects(Bookie, ObjList, SampleSize) ->
    StartWatchA = os:timestamp(),
    ToDeleteList = lists:sublist(lists:sort(ObjList), SampleSize),
    DeleteFun =
        fun({_R, Obj, Spec}) ->
            book_riakdelete(
                Bookie, Obj#r_object.bucket, Obj#r_object.key, Spec)
        end,
    lists:foreach(DeleteFun, ToDeleteList),
    Time = timer:now_diff(os:timestamp(), StartWatchA),
    io:format("~w objects deleted in ~w seconds~n",
                [SampleSize, Time/1000000]).
|
2016-11-28 22:26:09 +00:00
|
|
|
|
|
|
|
%% Create a random vclock of 1..8 entries; actors are drawn randomly
%% (with possible repeats) from actor_list/0, counters are 1..N.
generate_vclock() ->
    EntryFun =
        fun(Counter) ->
            {_, Actor} = lists:keyfind(rand:uniform(10), 1, actor_list()),
            {Actor, Counter}
        end,
    lists:map(EntryFun, lists:seq(1, rand:uniform(8))).
|
2016-11-28 22:26:09 +00:00
|
|
|
|
2017-11-17 18:30:51 +00:00
|
|
|
%% Increment the counter of the leading actor in the vclock.
update_vclock(VC) ->
    [{LeadActor, Counter}|Rest] = VC,
    [{LeadActor, Counter + 1}|Rest].
|
2016-11-28 22:26:09 +00:00
|
|
|
|
|
|
|
%% Fixed pool of ten numbered actors used when generating random vclocks
%% (see generate_vclock/0, which picks by rand:uniform(10)).
actor_list() ->
    [{1, albert}, {2, bertie}, {3, clara}, {4, dave}, {5, elton},
        {6, fred}, {7, george}, {8, harry}, {9, isaac}, {10, leila}].
|
|
|
|
|
2018-11-05 01:21:08 +00:00
|
|
|
%% Accessors for #r_object{} fields, exported so suites need not share
%% the record definition.
get_bucket(#r_object{bucket = Bucket}) ->
    Bucket.

get_key(#r_object{key = Key}) ->
    Key.
|
|
|
|
|
2016-11-28 22:26:09 +00:00
|
|
|
%% Decode the stored value from a v1 riak object binary.  Only
%% single-sibling objects are supported; any other sibling count logs
%% and returns the atom 'error'.
get_value(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        Rest1/binary>> = ObjectBin,
    <<_VclockBin:VclockLen/binary, SibCount:32/integer, SibsBin/binary>> = Rest1,
    case SibCount of
        1 ->
            <<SibLength:32/integer, Rest2/binary>> = SibsBin,
            <<ContentBin:SibLength/binary, _MetaBin/binary>> = Rest2,
            % leading tag byte written by encode_maybe_binary/1:
            % 0 => term_to_binary payload, 1 => raw binary
            case ContentBin of
                <<0:8/integer, ContentBin0/binary>> ->
                    binary_to_term(ContentBin0);
                <<1:8/integer, ContentAsIs/binary>> ->
                    ContentAsIs
            end;
        N ->
            io:format("SibCount of ~w with ObjectBin ~w~n", [N, ObjectBin]),
            error
    end.
|
|
|
|
|
2018-10-31 18:34:27 +00:00
|
|
|
%% Decode the last-modified timestamp ({Mega, Sec, Micro}) from the
%% metadata of a v1 riak object binary.  Only single-sibling objects are
%% handled (any other count raises case_clause).
get_lastmodified(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        Rest1/binary>> = ObjectBin,
    <<_VclockBin:VclockLen/binary, SibCount:32/integer, SibsBin/binary>> = Rest1,
    case SibCount of
        1 ->
            <<SibLength:32/integer, Rest2/binary>> = SibsBin,
            % skip the value, take the length-prefixed metadata
            <<_ContentBin:SibLength/binary,
                MetaLength:32/integer,
                MetaBin:MetaLength/binary,
                _Rest3/binary>> = Rest2,
            % last-modified is the first three 32-bit words of the
            % metadata (see meta_bin/1)
            <<MegaSec:32/integer,
                Sec:32/integer,
                MicroSec:32/integer,
                _RestMetaBin/binary>> = MetaBin,
            {MegaSec, Sec, MicroSec}
    end.
|
|
|
|
|
2016-11-28 22:26:09 +00:00
|
|
|
%% Decode the vclock term from a v1 riak object binary.
get_vclock(ObjectBin) ->
    <<_Magic:8/integer, _Vers:8/integer, VclockLen:32/integer,
        VclockBin:VclockLen/binary, _Rest/binary>> = ObjectBin,
    binary_to_term(VclockBin).
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% For each key-number seed in GenList: generate ChunkSize objects, load
%% them into the bookie (timing the load), optionally check TestObject is
%% still retrievable, and return the first SubListL items of each chunk.
load_objects(ChunkSize, GenList, Bookie, TestObject, Generator) ->
    load_objects(ChunkSize, GenList, Bookie, TestObject, Generator, 1000).

load_objects(ChunkSize, GenList, Bookie, TestObject, Generator, SubListL) ->
    LoadFun =
        fun(KN) ->
            ObjListA = Generator(ChunkSize, KN),
            StartWatchA = os:timestamp(),
            riakload(Bookie, ObjListA),
            Time = timer:now_diff(os:timestamp(), StartWatchA),
            io:format("~w objects loaded in ~w seconds~n",
                        [ChunkSize, Time/1000000]),
            case TestObject of
                no_check ->
                    ok;
                _ ->
                    check_forobject(Bookie, TestObject)
            end,
            lists:sublist(ObjListA, SubListL)
        end,
    lists:map(LoadFun, GenList).
|
|
|
|
|
|
|
|
|
|
|
|
%% Return a zero-arity fun that, each time it is called, produces Count
%% {add, Field, Value} index specs with fields idx1_bin..idxN_bin and
%% values built from a random date plus a random name.
get_randomindexes_generator(Count) ->
    fun() ->
        SpecFun =
            fun(X) ->
                IdxField =
                    iolist_to_binary(["idx", integer_to_list(X), "_bin"]),
                IdxValue =
                    iolist_to_binary([get_randomdate(), get_randomname()]),
                {add, IdxField, IdxValue}
            end,
        lists:map(SpecFun, lists:seq(1, Count))
    end.
|
|
|
|
|
|
|
|
%% Fixed lookup table of sample names keyed 1..16, used by get_randomname/0.
name_list() ->
    [{1, "Sophia"},
     {2, "Emma"},
     {3, "Olivia"},
     {4, "Ava"},
     {5, "Isabella"},
     {6, "Mia"},
     {7, "Zoe"},
     {8, "Lily"},
     {9, "Emily"},
     {10, "Madelyn"},
     {11, "Madison"},
     {12, "Chloe"},
     {13, "Charlotte"},
     {14, "Aubrey"},
     {15, "Avery"},
     {16, "Abigail"}].
|
2016-10-18 01:59:03 +01:00
|
|
|
|
|
|
|
%% Pick a name at random from name_list/0.
get_randomname() ->
    NameList = name_list(),
    % Derive the upper bound from the list itself rather than hard-coding
    % 16, so this function cannot drift out of sync with name_list/0.
    N = rand:uniform(length(NameList)),
    {N, Name} = lists:keyfind(N, 1, NameList),
    Name.
|
|
|
|
|
|
|
|
%% Produce a random date/time string of the form "YYYYMMDDHHMMSS", drawn
%% from a fixed range of gregorian seconds.
get_randomdate() ->
    Low = 60000000000,
    High = 70000000000,
    Seconds = Low + rand:uniform(High - Low),
    {{Y, Mo, D}, {H, Mi, S}} =
        calendar:gregorian_seconds_to_datetime(Seconds),
    lists:flatten(
        io_lib:format(
            "~4..0w~2..0w~2..0w~2..0w~2..0w~2..0w", [Y, Mo, D, H, Mi, S])).
|
|
|
|
|
|
|
|
|
2018-05-02 00:23:26 +01:00
|
|
|
%% Fold fun for key/index folds - accumulate each item, ignoring the bucket.
foldkeysfun(_Bucket, Item, Acc) -> [Item|Acc].
|
2016-11-18 11:53:14 +00:00
|
|
|
|
2017-07-03 18:03:13 +01:00
|
|
|
%% Fold fun for key/index folds which retains the bucket in the
%% accumulated result.  Index folds pass {Term, Key} and accumulate
%% {Term, {Bucket, Key}}; key folds pass Key and accumulate {Bucket, Key}.
foldkeysfun_returnbucket(Bucket, FoldItem, Acc) ->
    case FoldItem of
        {Term, Key} ->
            [{Term, {Bucket, Key}}|Acc];
        Key ->
            [{Bucket, Key}|Acc]
    end.
|
2017-07-03 18:03:13 +01:00
|
|
|
|
2016-10-27 00:57:19 +01:00
|
|
|
%% Confirm each {Key, IndexSpecs} pair in KSpecL is readable with value V,
%% and that an all-range query over the idx1_bin index returns exactly the
%% expected {IndexTerm, Key} pairs.
check_indexed_objects(Book, B, KSpecL, V) ->
    % Fetch each object (asserting the value matches V) and build the
    % expected result set for the index query
    Expected =
        [begin
            {ok, Obj} = book_riakget(Book, B, K),
            V = testutil:get_value(Obj),
            {add, <<"idx1_bin">>, IdxVal} = lists:keyfind(add, 1, Spc),
            {IdxVal, K}
        end || {K, Spc} <- KSpecL],
    % Run the all-range index query and compare with expectations
    {async, Folder} =
        leveled_bookie:book_returnfolder(
            Book,
            {index_query,
                B,
                {fun foldkeysfun/3, []},
                {<<"idx1_bin">>, <<"0">>, <<"|">>},
                ?RETURN_TERMS}),
    SW = os:timestamp(),
    QueryResults = Folder(),
    io:format(
        "Query match found of length ~w in ~w microseconds "
        "expected ~w ~n",
        [length(QueryResults),
            timer:now_diff(os:timestamp(), SW),
            length(Expected)]),
    QR = lists:sort(QueryResults),
    ER = lists:sort(Expected),
    ok = if ER == QR -> ok end,
    ok.
|
|
|
|
|
|
|
|
|
|
|
|
%% Put Count objects to Bucket, each with a single random index entry,
%% using a freshly generated compressible value - see put_indexed_objects/4.
put_indexed_objects(Book, Bucket, Count) ->
    V = get_compressiblevalue(),
    put_indexed_objects(Book, Bucket, Count, V).
|
|
|
|
|
|
|
|
%% Put Count objects with value V to Bucket, each with a single random
%% index entry, backing off when the bookie signals a pause.  Returns
%% {[{Key, IndexSpecs}], V} for use with check_indexed_objects/4 and
%% put_altered_indexed_objects/3.
put_indexed_objects(Book, Bucket, Count, V) ->
    IndexGen = get_randomindexes_generator(1),
    SW = os:timestamp(),
    ObjectList = generate_objects(Count, uuid, [], V, IndexGen, Bucket),
    PutFun =
        fun({_RN, Obj, Spc}) ->
            case book_riakput(Book, Obj, Spc) of
                ok ->
                    ok;
                pause ->
                    timer:sleep(?SLOWOFFER_DELAY)
            end,
            {testutil:get_key(Obj), Spc}
        end,
    KSpecL = lists:map(PutFun, ObjectList),
    io:format(
        "Put of ~w objects with ~w index entries "
        "each completed in ~w microseconds~n",
        [Count, 1, timer:now_diff(os:timestamp(), SW)]),
    {KSpecL, V}.
|
|
|
|
|
|
|
|
|
|
|
|
%% Alter the objects in the given [{Key, IndexSpecs}] list, removing old
%% index entries by default - see put_altered_indexed_objects/4.
put_altered_indexed_objects(Book, Bucket, KSpecL) ->
    put_altered_indexed_objects(Book, Bucket, KSpecL, true).
|
|
|
|
|
|
|
|
%% Alter the objects in the given [{Key, IndexSpecs}] list using a freshly
%% generated compressible value - see put_altered_indexed_objects/5.
put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i) ->
    V = get_compressiblevalue(),
    put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V).
|
|
|
|
|
|
|
|
%% Replace each object in the [{Key, IndexSpecs}] list with value V and a
%% fresh random index entry.  When RemoveOld2i is true the previous index
%% additions are removed as part of the PUT; when false they are retained
%% (so the returned spec lists accrue entries over repeated calls).
%% Each PUT is performed in a spawned worker process; pauses signalled by
%% the bookie are counted and logged.  Returns {[{Key, IndexSpecs}], V}.
put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i, V) ->
    SW = os:timestamp(),
    IndexGen = get_randomindexes_generator(1),
    ThisProcess = self(),
    FindAdditionFun = fun(SpcItem) -> element(1, SpcItem) == add end,
    MapFun =
        fun({K, Spc}, Acc) ->
            OldSpecs = lists:filter(FindAdditionFun, Spc),
            {RemoveSpc, AddSpc} =
                case RemoveOld2i of
                    true ->
                        {OldSpecs, []};
                    false ->
                        {[], OldSpecs}
                end,
            PutFun =
                fun() ->
                    {O, DeltaSpecs} =
                        set_object(
                            Bucket, K, V, IndexGen, RemoveSpc, AddSpc),
                    % DeltaSpecs should be new indexes added, and any old
                    % indexes which have been removed by this change where
                    % RemoveOld2i is true.
                    %
                    % The actual indexes within the object should reflect any
                    % history of indexes i.e. when RemoveOld2i is false.
                    %
                    % The [{Key, SpecL}] returned should accrue additions over
                    % loops if RemoveOld2i is false
                    R =
                        case book_riakput(Book, O, DeltaSpecs) of
                            ok ->
                                ok;
                            pause ->
                                timer:sleep(?SLOWOFFER_DELAY),
                                pause
                        end,
                    ThisProcess ! {R, DeltaSpecs}
                end,
            % spawn_link rather than bare spawn: if the worker crashed before
            % sending its result, the receive below would otherwise block
            % forever.  Linking propagates the worker failure to this process
            % so the test fails fast instead of hanging.
            spawn_link(PutFun),
            AccOut =
                receive
                    {ok, NewSpecs} -> Acc;
                    {pause, NewSpecs} -> Acc + 1
                end,
            % Note that order in the SpecL is important, as
            % check_indexed_objects, needs to find the latest item added
            {{K, lists:append(NewSpecs, AddSpc)}, AccOut}
        end,
    {RplKSpecL, Pauses} = lists:mapfoldl(MapFun, 0, KSpecL),
    io:format(
        "Altering ~w objects took ~w ms with ~w pauses~n",
        [length(KSpecL), timer:now_diff(os:timestamp(), SW) div 1000, Pauses]
    ),
    {RplKSpecL, V}.
|
|
|
|
|
|
|
|
%% Rotate NumberOfObjects objects in bucket B through several index
%% alterations, checking the index state after each rotation, and
%% confirming the state also survives a bookie restart.
rotating_object_check(RootPath, B, NumberOfObjects) ->
    BookOpts =
        [{root_path, RootPath},
            {cache_size, 1000},
            {max_journalsize, 5000000},
            {sync_strategy, sync_strategy()}],
    {ok, Bookie1} = leveled_bookie:book_start(BookOpts),
    {KSpcL1, V1} = put_indexed_objects(Bookie1, B, NumberOfObjects),
    ok = check_indexed_objects(Bookie1, B, KSpcL1, V1),
    {KSpcL2, V2} = put_altered_indexed_objects(Bookie1, B, KSpcL1),
    ok = check_indexed_objects(Bookie1, B, KSpcL2, V2),
    {KSpcL3, V3} = put_altered_indexed_objects(Bookie1, B, KSpcL2),
    % Close and restart - the final alteration must survive a restart
    ok = leveled_bookie:book_close(Bookie1),
    {ok, Bookie2} = leveled_bookie:book_start(BookOpts),
    ok = check_indexed_objects(Bookie2, B, KSpcL3, V3),
    {KSpcL4, V4} = put_altered_indexed_objects(Bookie2, B, KSpcL3),
    ok = check_indexed_objects(Bookie2, B, KSpcL4, V4),
    % Despite all the rotations, each key should appear exactly once
    KeyQuery = {keylist, ?RIAK_TAG, B, {fun foldkeysfun/3, []}},
    {async, KeyFolder} = leveled_bookie:book_returnfolder(Bookie2, KeyQuery),
    true = NumberOfObjects == length(KeyFolder()),
    ok = leveled_bookie:book_close(Bookie2),
    ok.
|
|
|
|
|
2023-12-19 11:56:03 +00:00
|
|
|
%% Rotate NumberOfObjects objects in bucket B through the three supplied
%% values in turn, without checking index state between rotations.
rotation_withnocheck(Book1, B, NumberOfObjects, V1, V2, V3) ->
    {KSpcL1, _} = put_indexed_objects(Book1, B, NumberOfObjects, V1),
    {KSpcL2, _} = put_altered_indexed_objects(Book1, B, KSpcL1, true, V2),
    {_KSpcL3, _} = put_altered_indexed_objects(Book1, B, KSpcL2, true, V3),
    ok.
|
|
|
|
|
2016-11-03 12:11:50 +00:00
|
|
|
%% Zero a single byte at Corruptions regularly-spaced positions in the
%% named journal file, first taking a .bak copy so the file can later be
%% reinstated with restore_file/2.
corrupt_journal(RootPath, FileName, Corruptions, BasePosition, GapSize) ->
    JournalDir = RootPath ++ "/journal/journal_files/",
    OriginalPath = JournalDir ++ FileName,
    BackupPath =
        JournalDir ++ filename:basename(FileName, ".cdb") ++ ".bak",
    io:format("Corruption attempt to be made to filename ~s ~w ~w~n",
                [FileName,
                    filelib:is_file(OriginalPath),
                    filelib:is_file(BackupPath)]),
    {ok, _BytesCopied} = file:copy(OriginalPath, BackupPath),
    {ok, Handle} = file:open(OriginalPath, [binary, raw, read, write]),
    ZeroByteFun =
        fun(N) ->
            Position = N * GapSize + BasePosition,
            ok = file:pwrite(Handle, Position, <<0:8/integer>>)
        end,
    lists:foreach(ZeroByteFun, lists:seq(1, Corruptions)),
    ok = file:close(Handle).
|
|
|
|
|
2016-11-03 12:11:50 +00:00
|
|
|
|
|
|
|
%% Reinstate a journal file from the .bak copy taken by corrupt_journal/5.
restore_file(RootPath, FileName) ->
    JournalDir = RootPath ++ "/journal/journal_files/",
    BackupPath = JournalDir ++ filename:basename(FileName, ".cdb") ++ ".bak",
    file:copy(BackupPath, JournalDir ++ FileName).
|
|
|
|
|
|
|
|
%% Rename a journal file to carry the .pnd (pending) extension, simulating
%% a journal file whose roll was never completed.
restore_topending(RootPath, FileName) ->
    JournalDir = RootPath ++ "/journal/journal_files/",
    OriginalPath = JournalDir ++ FileName,
    PndPath = JournalDir ++ filename:basename(FileName, ".cdb") ++ ".pnd",
    ok = file:rename(OriginalPath, PndPath),
    false = filelib:is_file(OriginalPath).
|
|
|
|
|
2016-11-01 00:46:14 +00:00
|
|
|
%% List the active journal (.cdb) files under the store's journal
%% directory.  Files with other extensions (e.g. .pnd, .bak) are excluded.
find_journals(RootPath) ->
    {ok, FileNames} = file:list_dir(RootPath ++ "/journal/journal_files"),
    [FN || FN <- FileNames, filename:extension(FN) == ".cdb"].
|
|
|
|
|
2018-10-31 16:35:53 +00:00
|
|
|
%% Convert an erlang timestamp tuple to whole seconds, discarding the
%% microsecond component.
convert_to_seconds({Mega, Sec, _Micro}) ->
    Sec + Mega * 1000000.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
%% Get the TicTac AAE segment ID for an object - see get_aae_segment/2.
get_aae_segment(Obj) ->
    get_aae_segment(testutil:get_bucket(Obj), testutil:get_key(Obj)).
|
|
|
|
|
|
|
|
%% Get the TicTac AAE segment ID for a bucket/key pair, including the
%% bucket type in the segment key when the bucket is typed.
get_aae_segment(MaybeTypedBucket, Key) ->
    SegmentKey =
        case MaybeTypedBucket of
            {Type, Bucket} ->
                <<Type/binary, Bucket/binary, Key/binary>>;
            Bucket ->
                <<Bucket/binary, Key/binary>>
        end,
    leveled_tictac:keyto_segment32(SegmentKey).
|
|
|
|
|
|
|
|
%% Compact the journal, then wait a default 20s for the deletion of
%% compacted files - see compact_and_wait/2.
compact_and_wait(Book) ->
    compact_and_wait(Book, 20000).
|
|
|
|
|
|
|
|
%% Trigger a journal compaction, poll (up to 15 loops of 20s) until the
%% compaction is no longer pending, then wait WaitForDelete ms for the
%% compacted journal files to be removed.
compact_and_wait(Book, WaitForDelete) ->
    ok = leveled_bookie:book_compactjournal(Book, 30000),
    PendingFun = fun leveled_bookie:book_islastcompactionpending/1,
    PollFun =
        fun(Loop, Pending) ->
            case Pending of
                false ->
                    false;
                true ->
                    io:format(
                        "Loop ~w waiting for journal "
                        "compaction to complete~n",
                        [Loop]),
                    timer:sleep(20000),
                    PendingFun(Book)
            end
        end,
    lists:foldl(PollFun, true, lists:seq(1, 15)),
    io:format("Waiting for journal deletes~n"),
    timer:sleep(WaitForDelete).
|