Support for 2i query part1

Added basic support for 2i (secondary index) queries. This involved some
refactoring of the test code to share functions between suites.

There is still a need for a Part 2, as no tests currently cover the removal
of index entries.
martinsumner 2016-10-18 01:59:03 +01:00
parent ac0504e79e
commit 3e475f46e8
11 changed files with 682 additions and 288 deletions
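
The headline change is a new index_query fold in leveled_bookie, reached
through book_returnfolder/2. A minimal usage sketch, following the pattern
exercised in iterator_SUITE below (the bucket, index field and term range
are illustrative only):

    {ok, Bookie} = leveled_bookie:book_start(#bookie_options{root_path="test"}),
    {async, Folder} = leveled_bookie:book_returnfolder(Bookie,
                                                        {index_query,
                                                            "Bucket",
                                                            {"idx1_bin",
                                                                "1901",
                                                                "1901~"},
                                                            {false, undefined}}),
    KeyList = Folder(),    % bare object keys, as ReturnTerms is false
    ok = leveled_bookie:book_close(Bookie).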

View file

@@ -124,7 +124,7 @@
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,
@@ -348,6 +348,17 @@ handle_call({return_folder, FolderType}, _From, State) ->
State#state.ledger_cache,
Bucket,
?RIAK_TAG),
State};
{index_query,
Bucket,
{IdxField, StartValue, EndValue},
{ReturnTerms, TermRegex}} ->
{reply,
index_query(State#state.penciller,
State#state.ledger_cache,
Bucket,
{IdxField, StartValue, EndValue},
{ReturnTerms, TermRegex}),
State}
end;
handle_call({compact_journal, Timeout}, _From, State) ->
@@ -408,6 +419,41 @@ bucket_stats(Penciller, LedgerCache, Bucket, Tag) ->
end,
{async, Folder}.
index_query(Penciller, LedgerCache,
Bucket,
{IdxField, StartValue, EndValue},
{ReturnTerms, TermRegex}) ->
PCLopts = #penciller_options{start_snapshot=true,
source_penciller=Penciller},
{ok, LedgerSnapshot} = leveled_penciller:pcl_start(PCLopts),
Folder = fun() ->
Increment = gb_trees:to_list(LedgerCache),
io:format("Length of increment in snapshot is ~w~n",
[length(Increment)]),
ok = leveled_penciller:pcl_loadsnapshot(LedgerSnapshot,
{infinity, Increment}),
StartKey = leveled_codec:to_ledgerkey(Bucket, null, ?IDX_TAG,
IdxField, StartValue),
EndKey = leveled_codec:to_ledgerkey(Bucket, null, ?IDX_TAG,
IdxField, EndValue),
AddFun = case ReturnTerms of
true ->
fun add_terms/3;
_ ->
fun add_keys/3
end,
AccFun = accumulate_index(TermRegex, AddFun),
Acc = leveled_penciller:pcl_fetchkeys(LedgerSnapshot,
StartKey,
EndKey,
AccFun,
[]),
ok = leveled_penciller:pcl_close(LedgerSnapshot),
Acc
end,
{async, Folder}.
shutdown_wait([], _Inker) ->
false;
shutdown_wait([TopPause|Rest], Inker) ->
@@ -476,6 +522,47 @@ accumulate_size(Key, Value, {Size, Count}) ->
{Size, Count}
end.
add_keys(ObjKey, _IdxValue, Acc) ->
Acc ++ [ObjKey].
add_terms(ObjKey, IdxValue, Acc) ->
Acc ++ [{IdxValue, ObjKey}].
accumulate_index(TermRe, AddFun) ->
case TermRe of
undefined ->
fun(Key, Value, Acc) ->
case leveled_codec:is_active(Key, Value) of
true ->
{_Bucket,
ObjKey,
IdxValue} = leveled_codec:from_ledgerkey(Key),
AddFun(ObjKey, IdxValue, Acc);
false ->
Acc
end end;
TermRe ->
fun(Key, Value, Acc) ->
case leveled_codec:is_active(Key, Value) of
true ->
{_Bucket,
ObjKey,
IdxValue} = leveled_codec:from_ledgerkey(Key),
case re:run(IdxValue, TermRe) of
nomatch ->
Acc;
_ ->
AddFun(ObjKey, IdxValue, Acc)
end;
false ->
Acc
end end
end.
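
A worked example of the accumulator closure (a sketch: the ledger value
assumes the {SQN, Status, null} shape built by convert_indexspecs in
leveled_codec, and that is_active/2 treats {active, infinity} as current):

    {ok, TermRe} = re:compile("[0-9]+Mia"),
    AccFun = accumulate_index(TermRe, fun add_terms/3),
    LK1 = leveled_codec:to_ledgerkey("Bucket", "K1", ?IDX_TAG,
                                        "idx2_bin", "2000Mia"),
    LV = {1, {active, infinity}, null},
    [{"2000Mia", "K1"}] = AccFun(LK1, LV, []),
    %% A term failing the regex leaves the accumulator untouched
    LK2 = leveled_codec:to_ledgerkey("Bucket", "K2", ?IDX_TAG,
                                        "idx2_bin", "2000Zoe"),
    [] = AccFun(LK2, LV, []).
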
preparefor_ledgercache(PK, SQN, Obj, Size, IndexSpecs) ->
{Bucket, Key, PrimaryChange} = leveled_codec:generate_ledgerkv(PK,
SQN,

View file

@@ -44,7 +44,7 @@
-module(leveled_cdb).
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,

View file

@@ -28,7 +28,7 @@
-module(leveled_codec).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-include_lib("eunit/include/eunit.hrl").
@@ -43,14 +43,24 @@
key_dominates/2,
print_key/1,
to_ledgerkey/3,
to_ledgerkey/5,
from_ledgerkey/1,
build_metadata_object/2,
generate_ledgerkv/4,
generate_ledgerkv/5,
get_size/2,
convert_indexspecs/4,
riakto_keydetails/1]).
riakto_keydetails/1,
generate_uuid/0]).
%% Credit to
%% https://github.com/afiskon/erlang-uuid-v4/blob/master/src/uuid.erl
generate_uuid() ->
<<A:32, B:16, C:16, D:16, E:48>> = crypto:rand_bytes(16),
io_lib:format("~8.16.0b-~4.16.0b-4~3.16.0b-~4.16.0b-~12.16.0b",
[A, B, C band 16#0fff, D band 16#3fff bor 16#8000, E]).
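
Note that io_lib:format/2 returns a deep character list rather than a flat
string; the inker's filepath clauses below splice it in with ++, which the
filename functions tolerate. A quick sanity sketch:

    UUID = lists:flatten(leveled_codec:generate_uuid()),
    36 = length(UUID).    % 8-4-4-4-12 hex digits plus four hyphens
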
strip_to_keyonly({keyonly, K}) -> K;
strip_to_keyonly({K, _V}) -> K.
@@ -87,6 +97,13 @@ is_active(Key, Value) ->
false
end.
from_ledgerkey({Tag, Bucket, {_IdxField, IdxValue}, Key})
when Tag == ?IDX_TAG ->
{Bucket, Key, IdxValue}.
to_ledgerkey(Bucket, Key, Tag, Field, Value) when Tag == ?IDX_TAG ->
{?IDX_TAG, Bucket, {Field, Value}, Key}.
to_ledgerkey(Bucket, Key, Tag) ->
{Tag, Bucket, Key, null}.
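
The new clauses are inverses for index entries, which is what lets the
bookie's index_query recover the object key and term from a ledger key:

    LK = to_ledgerkey("Bucket", "Key1", ?IDX_TAG, "idx1_bin", "Term1"),
    %% LK =:= {?IDX_TAG, "Bucket", {"idx1_bin", "Term1"}, "Key1"}
    {"Bucket", "Key1", "Term1"} = from_ledgerkey(LK).
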
@@ -132,7 +149,7 @@ endkey_passed(EndKey, CheckingKey) ->
EndKey < CheckingKey.
convert_indexspecs(IndexSpecs, Bucket, Key, SQN) ->
lists:map(fun({IndexOp, IndexField, IndexValue}) ->
lists:map(fun({IndexOp, IdxField, IdxValue}) ->
Status = case IndexOp of
add ->
%% TODO: timestamp support
@@ -141,7 +158,8 @@ convert_indexspecs(IndexSpecs, Bucket, Key, SQN) ->
%% TODO: timestamps for delayed reaping
{tomb, infinity}
end,
{{i, Bucket, {IndexField, IndexValue}, Key},
{to_ledgerkey(Bucket, Key, ?IDX_TAG,
IdxField, IdxValue),
{SQN, Status, null}}
end,
IndexSpecs).
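
For illustration, a single add spec now flows through to_ledgerkey/5 rather
than building the {i, ...} tuple inline (the ?IDX_TAG literal i is taken
from the removed line; the {active, infinity} status is assumed from the
elided add branch above):

    [{{i, "Bucket", {"idx1_bin", "2000Mia"}, "Key1"},
        {5, {active, infinity}, null}}] =
            convert_indexspecs([{add, "idx1_bin", "2000Mia"}],
                                "Bucket", "Key1", 5).
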

View file

@@ -4,7 +4,7 @@
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,

View file

@@ -76,7 +76,7 @@
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,
@@ -669,20 +669,14 @@ filepath(RootPath, journal_compact_dir) ->
filepath(RootPath, NewSQN, new_journal) ->
filename:join(filepath(RootPath, journal_dir),
integer_to_list(NewSQN) ++ "_"
++ generate_uuid()
++ leveled_codec:generate_uuid()
++ "." ++ ?PENDING_FILEX);
filepath(CompactFilePath, NewSQN, compact_journal) ->
filename:join(CompactFilePath,
integer_to_list(NewSQN) ++ "_"
++ generate_uuid()
++ leveled_codec:generate_uuid()
++ "." ++ ?PENDING_FILEX).
%% Credit to
%% https://github.com/afiskon/erlang-uuid-v4/blob/master/src/uuid.erl
generate_uuid() ->
<<A:32, B:16, C:16, D:16, E:48>> = crypto:rand_bytes(16),
io_lib:format("~8.16.0b-~4.16.0b-4~3.16.0b-~4.16.0b-~12.16.0b",
[A, B, C band 16#0fff, D band 16#3fff bor 16#8000, E]).
simple_manifest_reader(SQN, RootPath) ->
ManifestPath = filepath(RootPath, manifest_dir),
@@ -815,6 +809,9 @@ simple_inker_completeactivejournal_test() ->
F2 = filename:join(JournalFP, "nursery_3.pnd"),
{ok, PidW} = leveled_cdb:cdb_open_writer(F2),
{ok, _F2} = leveled_cdb:cdb_complete(PidW),
F1 = filename:join(JournalFP, "nursery_1.cdb"),
F1r = filename:join(JournalFP, "nursery_1.pnd"),
ok = file:rename(F1, F1r),
{ok, Ink1} = ink_start(#inker_options{root_path=RootPath,
cdb_options=CDBopts}),
Obj1 = ink_get(Ink1, "Key1", 1),

View file

@@ -52,7 +52,7 @@
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,

View file

@@ -223,7 +223,7 @@
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,

View file

@@ -143,7 +143,7 @@
-module(leveled_sft).
-behaviour(gen_server).
-include("../include/leveled.hrl").
-include("include/leveled.hrl").
-export([init/1,
handle_call/3,

View file

@@ -7,7 +7,8 @@
journal_compaction/1,
fetchput_snapshot/1,
load_and_count/1,
load_and_count_withdelete/1]).
load_and_count_withdelete/1
]).
all() -> [simple_put_fetch_head_delete,
many_put_fetch_head,
@@ -19,33 +20,33 @@ all() -> [simple_put_fetch_head_delete,
simple_put_fetch_head_delete(_Config) ->
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
check_bookie_forobject(Bookie1, TestObject),
check_bookie_formissingobject(Bookie1, "Bucket1", "Key2"),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
ok = leveled_bookie:book_close(Bookie1),
StartOpts2 = #bookie_options{root_path=RootPath,
max_journalsize=3000000},
{ok, Bookie2} = leveled_bookie:book_start(StartOpts2),
check_bookie_forobject(Bookie2, TestObject),
ObjList1 = generate_multiple_objects(5000, 2),
testutil:check_forobject(Bookie2, TestObject),
ObjList1 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
ObjList1),
ChkList1 = lists:sublist(lists:sort(ObjList1), 100),
check_bookie_forlist(Bookie2, ChkList1),
check_bookie_forobject(Bookie2, TestObject),
check_bookie_formissingobject(Bookie2, "Bucket1", "Key2"),
testutil:check_forlist(Bookie2, ChkList1),
testutil:check_forobject(Bookie2, TestObject),
testutil:check_formissingobject(Bookie2, "Bucket1", "Key2"),
ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", "Value2",
[{add, "Index1", "Term1"}]),
{ok, "Value2"} = leveled_bookie:book_get(Bookie2, "Bucket1", "Key2"),
{ok, {62888926, 43}} = leveled_bookie:book_head(Bookie2,
"Bucket1",
"Key2"),
check_bookie_formissingobject(Bookie2, "Bucket1", "Key2"),
testutil:check_formissingobject(Bookie2, "Bucket1", "Key2"),
ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", <<"Value2">>,
[{remove, "Index1", "Term1"},
{add, "Index1", <<"Term2">>}]),
@@ -60,110 +61,111 @@ simple_put_fetch_head_delete(_Config) ->
{ok, Bookie4} = leveled_bookie:book_start(StartOpts2),
not_found = leveled_bookie:book_get(Bookie4, "Bucket1", "Key2"),
ok = leveled_bookie:book_close(Bookie4),
reset_filestructure().
testutil:reset_filestructure().
many_put_fetch_head(_Config) ->
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
ok = leveled_bookie:book_close(Bookie1),
StartOpts2 = #bookie_options{root_path=RootPath,
max_journalsize=1000000000},
{ok, Bookie2} = leveled_bookie:book_start(StartOpts2),
check_bookie_forobject(Bookie2, TestObject),
testutil:check_forobject(Bookie2, TestObject),
GenList = [2, 20002, 40002, 60002, 80002,
100002, 120002, 140002, 160002, 180002],
CLs = load_objects(20000, GenList, Bookie2, TestObject,
fun generate_multiple_smallobjects/2),
CLs = testutil:load_objects(20000, GenList, Bookie2, TestObject,
fun testutil:generate_smallobjects/2),
CL1A = lists:nth(1, CLs),
ChkListFixed = lists:nth(length(CLs), CLs),
check_bookie_forlist(Bookie2, CL1A),
ObjList2A = generate_multiple_objects(5000, 2),
testutil:check_forlist(Bookie2, CL1A),
ObjList2A = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
ObjList2A),
ChkList2A = lists:sublist(lists:sort(ObjList2A), 1000),
check_bookie_forlist(Bookie2, ChkList2A),
check_bookie_forlist(Bookie2, ChkListFixed),
check_bookie_forobject(Bookie2, TestObject),
check_bookie_forlist(Bookie2, ChkList2A),
check_bookie_forlist(Bookie2, ChkListFixed),
check_bookie_forobject(Bookie2, TestObject),
testutil:check_forlist(Bookie2, ChkList2A),
testutil:check_forlist(Bookie2, ChkListFixed),
testutil:check_forobject(Bookie2, TestObject),
testutil:check_forlist(Bookie2, ChkList2A),
testutil:check_forlist(Bookie2, ChkListFixed),
testutil:check_forobject(Bookie2, TestObject),
ok = leveled_bookie:book_close(Bookie2),
{ok, Bookie3} = leveled_bookie:book_start(StartOpts2),
check_bookie_forlist(Bookie3, ChkList2A),
check_bookie_forobject(Bookie3, TestObject),
testutil:check_forlist(Bookie3, ChkList2A),
testutil:check_forobject(Bookie3, TestObject),
ok = leveled_bookie:book_close(Bookie3),
reset_filestructure().
testutil:reset_filestructure().
journal_compaction(_Config) ->
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath,
max_journalsize=4000000},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
check_bookie_forobject(Bookie1, TestObject),
ObjList1 = generate_multiple_objects(5000, 2),
testutil:check_forobject(Bookie1, TestObject),
ObjList1 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
ObjList1),
ChkList1 = lists:sublist(lists:sort(ObjList1), 1000),
check_bookie_forlist(Bookie1, ChkList1),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forobject(Bookie1, TestObject),
{B2, K2, V2, Spec2, MD} = {"Bucket1",
"Key1",
"Value1",
[],
{"MDK1", "MDV1"}},
{TestObject2, TestSpec2} = generate_testobject(B2, K2, V2, Spec2, MD),
{TestObject2, TestSpec2} = testutil:generate_testobject(B2, K2,
V2, Spec2, MD),
ok = leveled_bookie:book_riakput(Bookie1, TestObject2, TestSpec2),
ok = leveled_bookie:book_compactjournal(Bookie1, 30000),
check_bookie_forlist(Bookie1, ChkList1),
check_bookie_forobject(Bookie1, TestObject),
check_bookie_forobject(Bookie1, TestObject2),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject2),
timer:sleep(5000), % Allow for compaction to complete
io:format("Has journal completed?~n"),
check_bookie_forlist(Bookie1, ChkList1),
check_bookie_forobject(Bookie1, TestObject),
check_bookie_forobject(Bookie1, TestObject2),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject2),
%% Now replace all the objects
ObjList2 = generate_multiple_objects(5000, 2),
ObjList2 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
ObjList2),
ok = leveled_bookie:book_compactjournal(Bookie1, 30000),
ChkList3 = lists:sublist(lists:sort(ObjList2), 500),
check_bookie_forlist(Bookie1, ChkList3),
testutil:check_forlist(Bookie1, ChkList3),
ok = leveled_bookie:book_close(Bookie1),
% Restart
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
check_bookie_forobject(Bookie2, TestObject),
check_bookie_forlist(Bookie2, ChkList3),
testutil:check_forobject(Bookie2, TestObject),
testutil:check_forlist(Bookie2, ChkList3),
ok = leveled_bookie:book_close(Bookie2),
reset_filestructure().
testutil:reset_filestructure().
fetchput_snapshot(_Config) ->
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath, max_journalsize=3000000},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ObjList1 = generate_multiple_objects(5000, 2),
ObjList1 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
ObjList1),
SnapOpts1 = #bookie_options{snapshot_bookie=Bookie1},
{ok, SnapBookie1} = leveled_bookie:book_start(SnapOpts1),
ChkList1 = lists:sublist(lists:sort(ObjList1), 100),
check_bookie_forlist(Bookie1, ChkList1),
check_bookie_forlist(SnapBookie1, ChkList1),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forlist(SnapBookie1, ChkList1),
ok = leveled_bookie:book_close(SnapBookie1),
check_bookie_forlist(Bookie1, ChkList1),
testutil:check_forlist(Bookie1, ChkList1),
ok = leveled_bookie:book_close(Bookie1),
io:format("Closed initial bookies~n"),
@@ -172,89 +174,94 @@ fetchput_snapshot(_Config) ->
{ok, SnapBookie2} = leveled_bookie:book_start(SnapOpts2),
io:format("Bookies restarted~n"),
check_bookie_forlist(Bookie2, ChkList1),
testutil:check_forlist(Bookie2, ChkList1),
io:format("Check active bookie still contains original data~n"),
check_bookie_forlist(SnapBookie2, ChkList1),
testutil:check_forlist(SnapBookie2, ChkList1),
io:format("Check snapshot still contains original data~n"),
ObjList2 = generate_multiple_objects(5000, 2),
ObjList2 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
ObjList2),
io:format("Replacement objects put~n"),
ChkList2 = lists:sublist(lists:sort(ObjList2), 100),
check_bookie_forlist(Bookie2, ChkList2),
check_bookie_forlist(SnapBookie2, ChkList1),
testutil:check_forlist(Bookie2, ChkList2),
testutil:check_forlist(SnapBookie2, ChkList1),
io:format("Checked for replacement objects in active bookie" ++
", old objects in snapshot~n"),
{ok, FNsA} = file:list_dir(RootPath ++ "/ledger/ledger_files"),
ObjList3 = generate_multiple_objects(15000, 5002),
ObjList3 = testutil:generate_objects(15000, 5002),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
ObjList3),
ChkList3 = lists:sublist(lists:sort(ObjList3), 100),
check_bookie_forlist(Bookie2, ChkList3),
check_bookie_formissinglist(SnapBookie2, ChkList3),
testutil:check_forlist(Bookie2, ChkList3),
testutil:check_formissinglist(SnapBookie2, ChkList3),
GenList = [20002, 40002, 60002, 80002, 100002, 120002],
CLs2 = load_objects(20000, GenList, Bookie2, TestObject,
fun generate_multiple_smallobjects/2),
CLs2 = testutil:load_objects(20000, GenList, Bookie2, TestObject,
fun testutil:generate_smallobjects/2),
io:format("Loaded significant numbers of new objects~n"),
check_bookie_forlist(Bookie2, lists:nth(length(CLs2), CLs2)),
testutil:check_forlist(Bookie2, lists:nth(length(CLs2), CLs2)),
io:format("Checked active bookie has new objects~n"),
{ok, SnapBookie3} = leveled_bookie:book_start(SnapOpts2),
check_bookie_forlist(SnapBookie3, lists:nth(length(CLs2), CLs2)),
check_bookie_formissinglist(SnapBookie2, ChkList3),
check_bookie_formissinglist(SnapBookie2, lists:nth(length(CLs2), CLs2)),
check_bookie_forlist(SnapBookie3, ChkList2),
check_bookie_forlist(SnapBookie2, ChkList1),
testutil:check_forlist(SnapBookie3, lists:nth(length(CLs2), CLs2)),
testutil:check_formissinglist(SnapBookie2, ChkList3),
testutil:check_formissinglist(SnapBookie2, lists:nth(length(CLs2), CLs2)),
testutil:check_forlist(SnapBookie3, ChkList2),
testutil:check_forlist(SnapBookie2, ChkList1),
io:format("Started new snapshot and check for new objects~n"),
CLs3 = load_objects(20000, GenList, Bookie2, TestObject,
fun generate_multiple_smallobjects/2),
check_bookie_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
check_bookie_forlist(Bookie2, lists:nth(1, CLs3)),
CLs3 = testutil:load_objects(20000, GenList, Bookie2, TestObject,
fun testutil:generate_smallobjects/2),
testutil:check_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
testutil:check_forlist(Bookie2, lists:nth(1, CLs3)),
{ok, FNsB} = file:list_dir(RootPath ++ "/ledger/ledger_files"),
ok = leveled_bookie:book_close(SnapBookie2),
check_bookie_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
testutil:check_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
ok = leveled_bookie:book_close(SnapBookie3),
check_bookie_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
check_bookie_forlist(Bookie2, lists:nth(1, CLs3)),
testutil:check_forlist(Bookie2, lists:nth(length(CLs3), CLs3)),
testutil:check_forlist(Bookie2, lists:nth(1, CLs3)),
timer:sleep(90000),
{ok, FNsC} = file:list_dir(RootPath ++ "/ledger/ledger_files"),
true = length(FNsB) > length(FNsA),
true = length(FNsB) > length(FNsC),
{B1Size, B1Count} = check_bucket_stats(Bookie2, "Bucket1"),
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, "Bucket1"),
true = B1Size > 0,
true = B1Count == 1,
{B1Size, B1Count} = check_bucket_stats(Bookie2, "Bucket1"),
{BSize, BCount} = check_bucket_stats(Bookie2, "Bucket"),
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, "Bucket1"),
{BSize, BCount} = testutil:check_bucket_stats(Bookie2, "Bucket"),
true = BSize > 0,
true = BCount == 140000,
ok = leveled_bookie:book_close(Bookie2),
reset_filestructure().
testutil:reset_filestructure().
load_and_count(_Config) ->
% Use artificially small files, and then load keys, counting that they're
% all present
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath, max_journalsize=50000000},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading initial small objects~n"),
G1 = fun testutil:generate_smallobjects/2,
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, TestObject,
fun generate_multiple_smallobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
TestObject,
G1),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Acc + 5000 == Count ->
ok
@@ -262,12 +269,17 @@ load_and_count(_Config) ->
Acc + 5000 end,
0,
lists:seq(1, 20)),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading larger compressible objects~n"),
G2 = fun testutil:generate_compressibleobjects/2,
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, TestObject,
fun generate_multiple_compressibleobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
TestObject,
G2),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Acc + 5000 == Count ->
ok
@@ -275,12 +287,16 @@ load_and_count(_Config) ->
Acc + 5000 end,
100000,
lists:seq(1, 20)),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
io:format("Replacing small objects~n"),
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, TestObject,
fun generate_multiple_smallobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
TestObject,
G1),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Count == 200000 ->
ok
@@ -288,12 +304,16 @@ load_and_count(_Config) ->
Acc + 5000 end,
0,
lists:seq(1, 20)),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading more small objects~n"),
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, TestObject,
fun generate_multiple_compressibleobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
TestObject,
G2),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Acc + 5000 == Count ->
ok
@@ -301,25 +321,30 @@ load_and_count(_Config) ->
Acc + 5000 end,
200000,
lists:seq(1, 20)),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
ok = leveled_bookie:book_close(Bookie1),
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
{_BSize, 300000} = check_bucket_stats(Bookie2, "Bucket"),
{_, 300000} = testutil:check_bucket_stats(Bookie2, "Bucket"),
ok = leveled_bookie:book_close(Bookie2),
reset_filestructure().
testutil:reset_filestructure().
load_and_count_withdelete(_Config) ->
RootPath = reset_filestructure(),
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath, max_journalsize=50000000},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = generate_testobject(),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading initial small objects~n"),
G1 = fun testutil:generate_smallobjects/2,
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, TestObject,
fun generate_multiple_smallobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
TestObject,
G1),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Acc + 5000 == Count ->
ok
@@ -327,17 +352,22 @@ load_and_count_withdelete(_Config) ->
Acc + 5000 end,
0,
lists:seq(1, 20)),
check_bookie_forobject(Bookie1, TestObject),
testutil:check_forobject(Bookie1, TestObject),
{BucketD, KeyD} = leveled_codec:riakto_keydetails(TestObject),
{_, 1} = check_bucket_stats(Bookie1, BucketD),
{_, 1} = testutil:check_bucket_stats(Bookie1, BucketD),
ok = leveled_bookie:book_riakdelete(Bookie1, BucketD, KeyD, []),
not_found = leveled_bookie:book_riakget(Bookie1, BucketD, KeyD),
{_, 0} = check_bucket_stats(Bookie1, BucketD),
{_, 0} = testutil:check_bucket_stats(Bookie1, BucketD),
io:format("Loading larger compressible objects~n"),
G2 = fun testutil:generate_compressibleobjects/2,
lists:foldl(fun(_X, Acc) ->
load_objects(5000, [Acc + 2], Bookie1, no_check,
fun generate_multiple_compressibleobjects/2),
{_Size, Count} = check_bucket_stats(Bookie1, "Bucket"),
testutil:load_objects(5000,
[Acc + 2],
Bookie1,
no_check,
G2),
{_S, Count} = testutil:check_bucket_stats(Bookie1,
"Bucket"),
if
Acc + 5000 == Count ->
ok
@@ -348,160 +378,8 @@ load_and_count_withdelete(_Config) ->
not_found = leveled_bookie:book_riakget(Bookie1, BucketD, KeyD),
ok = leveled_bookie:book_close(Bookie1),
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
check_bookie_formissingobject(Bookie2, BucketD, KeyD),
{_BSize, 0} = check_bucket_stats(Bookie2, BucketD),
ok = leveled_bookie:book_close(Bookie2).
testutil:check_formissingobject(Bookie2, BucketD, KeyD),
{_BSize, 0} = testutil:check_bucket_stats(Bookie2, BucketD),
ok = leveled_bookie:book_close(Bookie2),
testutil:reset_filestructure().
reset_filestructure() ->
RootPath = "test",
filelib:ensure_dir(RootPath ++ "/journal/"),
filelib:ensure_dir(RootPath ++ "/ledger/"),
leveled_inker:clean_testdir(RootPath ++ "/journal"),
leveled_penciller:clean_testdir(RootPath ++ "/ledger"),
RootPath.
check_bucket_stats(Bookie, Bucket) ->
FoldSW1 = os:timestamp(),
io:format("Checking bucket size~n"),
{async, Folder1} = leveled_bookie:book_returnfolder(Bookie,
{riakbucket_stats,
Bucket}),
{B1Size, B1Count} = Folder1(),
io:format("Bucket fold completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(), FoldSW1)]),
io:format("Bucket ~s has size ~w and count ~w~n",
[Bucket, B1Size, B1Count]),
{B1Size, B1Count}.
check_bookie_forlist(Bookie, ChkList) ->
check_bookie_forlist(Bookie, ChkList, false).
check_bookie_forlist(Bookie, ChkList, Log) ->
SW = os:timestamp(),
lists:foreach(fun({_RN, Obj, _Spc}) ->
if
Log == true ->
io:format("Fetching Key ~w~n", [Obj#r_object.key]);
true ->
ok
end,
R = leveled_bookie:book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
R = {ok, Obj} end,
ChkList),
io:format("Fetch check took ~w microseconds checking list of length ~w~n",
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
check_bookie_formissinglist(Bookie, ChkList) ->
SW = os:timestamp(),
lists:foreach(fun({_RN, Obj, _Spc}) ->
R = leveled_bookie:book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
R = not_found end,
ChkList),
io:format("Miss check took ~w microseconds checking list of length ~w~n",
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
check_bookie_forobject(Bookie, TestObject) ->
{ok, TestObject} = leveled_bookie:book_riakget(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
{ok, HeadObject} = leveled_bookie:book_riakhead(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
ok = case {HeadObject#r_object.bucket,
HeadObject#r_object.key,
HeadObject#r_object.vclock} of
{B1, K1, VC1} when B1 == TestObject#r_object.bucket,
K1 == TestObject#r_object.key,
VC1 == TestObject#r_object.vclock ->
ok
end.
check_bookie_formissingobject(Bookie, Bucket, Key) ->
not_found = leveled_bookie:book_riakget(Bookie, Bucket, Key),
not_found = leveled_bookie:book_riakhead(Bookie, Bucket, Key).
generate_testobject() ->
{B1, K1, V1, Spec1, MD} = {"Bucket1",
"Key1",
"Value1",
[],
{"MDK1", "MDV1"}},
generate_testobject(B1, K1, V1, Spec1, MD).
generate_testobject(B, K, V, Spec, MD) ->
Content = #r_content{metadata=MD, value=V},
{#r_object{bucket=B, key=K, contents=[Content], vclock=[{'a',1}]},
Spec}.
generate_multiple_compressibleobjects(Count, KeyNumber) ->
S1 = "111111111111111",
S2 = "222222222222222",
S3 = "333333333333333",
S4 = "aaaaaaaaaaaaaaa",
S5 = "AAAAAAAAAAAAAAA",
S6 = "GGGGGGGGGGGGGGG",
S7 = "===============",
S8 = "...............",
Selector = [{1, S1}, {2, S2}, {3, S3}, {4, S4},
{5, S5}, {6, S6}, {7, S7}, {8, S8}],
L = lists:seq(1, 1024),
V = lists:foldl(fun(_X, Acc) ->
{_, Str} = lists:keyfind(random:uniform(8), 1, Selector),
Acc ++ Str end,
"",
L),
generate_multiple_objects(Count, KeyNumber, [], V).
generate_multiple_smallobjects(Count, KeyNumber) ->
generate_multiple_objects(Count, KeyNumber, [], crypto:rand_bytes(512)).
generate_multiple_objects(Count, KeyNumber) ->
generate_multiple_objects(Count, KeyNumber, [], crypto:rand_bytes(4096)).
generate_multiple_objects(0, _KeyNumber, ObjL, _Value) ->
ObjL;
generate_multiple_objects(Count, KeyNumber, ObjL, Value) ->
Obj = {"Bucket",
"Key" ++ integer_to_list(KeyNumber),
Value,
[],
[{"MDK", "MDV" ++ integer_to_list(KeyNumber)},
{"MDK2", "MDV" ++ integer_to_list(KeyNumber)}]},
{B1, K1, V1, Spec1, MD} = Obj,
Content = #r_content{metadata=MD, value=V1},
Obj1 = #r_object{bucket=B1, key=K1, contents=[Content], vclock=[{'a',1}]},
generate_multiple_objects(Count - 1,
KeyNumber + 1,
ObjL ++ [{random:uniform(), Obj1, Spec1}],
Value).
load_objects(ChunkSize, GenList, Bookie, TestObject, Generator) ->
lists:map(fun(KN) ->
ObjListA = Generator(ChunkSize, KN),
StartWatchA = os:timestamp(),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie, Obj, Spc)
end,
ObjListA),
Time = timer:now_diff(os:timestamp(), StartWatchA),
io:format("~w objects loaded in ~w seconds~n",
[ChunkSize, Time/1000000]),
if
TestObject == no_check ->
ok;
true ->
check_bookie_forobject(Bookie, TestObject)
end,
lists:sublist(ObjListA, 1000) end,
GenList).

View file

@@ -0,0 +1,182 @@
-module(iterator_SUITE).
-include_lib("common_test/include/ct.hrl").
-include("include/leveled.hrl").
-export([all/0]).
-export([simple_load_with2i/1,
simple_querycount/1]).
all() -> [simple_load_with2i,
simple_querycount].
simple_load_with2i(_Config) ->
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath,
max_journalsize=50000000},
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
testutil:check_forobject(Bookie1, TestObject),
ObjL1 = testutil:generate_objects(10000,
uuid,
[],
testutil:get_compressiblevalue(),
testutil:get_randomindexes_generator(8)),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
ObjL1),
ChkList1 = lists:sublist(lists:sort(ObjL1), 100),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forobject(Bookie1, TestObject),
ok = leveled_bookie:book_close(Bookie1),
testutil:reset_filestructure().
simple_querycount(_Config) ->
RootPath = testutil:reset_filestructure(),
StartOpts1 = #bookie_options{root_path=RootPath,
max_journalsize=50000000},
{ok, Book1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Book1, TestObject, TestSpec),
testutil:check_forobject(Book1, TestObject),
testutil:check_formissingobject(Book1, "Bucket1", "Key2"),
testutil:check_forobject(Book1, TestObject),
lists:foreach(fun(_X) ->
V = testutil:get_compressiblevalue(),
Indexes = testutil:get_randomindexes_generator(8),
SW = os:timestamp(),
ObjL1 = testutil:generate_objects(10000,
uuid,
[],
V,
Indexes),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Book1,
Obj,
Spc)
end,
ObjL1),
io:format("Put of 10000 objects with 8 index entries "
++
"each completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(), SW)])
end,
lists:seq(1, 8)),
testutil:check_forobject(Book1, TestObject),
Total = lists:foldl(fun(X, Acc) ->
IdxF = "idx" ++ integer_to_list(X) ++ "_bin",
T = count_termsonindex("Bucket",
IdxF,
Book1,
{false, undefined}),
io:format("~w terms found on index ~s~n",
[T, IdxF]),
Acc + T
end,
0,
lists:seq(1, 8)),
ok = case Total of
640000 ->
ok
end,
Index1Count = count_termsonindex("Bucket",
"idx1_bin",
Book1,
{false, undefined}),
ok = leveled_bookie:book_close(Book1),
{ok, Book2} = leveled_bookie:book_start(StartOpts1),
Index1Count = count_termsonindex("Bucket",
"idx1_bin",
Book2,
{false, undefined}),
NameList = testutil:name_list(),
TotalNameByName = lists:foldl(fun({_X, Name}, Acc) ->
{ok, Regex} = re:compile("[0-9]+" ++
Name),
SW = os:timestamp(),
T = count_termsonindex("Bucket",
"idx1_bin",
Book2,
{false,
Regex}),
TD = timer:now_diff(os:timestamp(),
SW),
io:format("~w terms found on " ++
"index idx1 with a " ++
"regex in ~w " ++
"microseconds~n",
[T, TD]),
Acc + T
end,
0,
NameList),
ok = case TotalNameByName of
Index1Count ->
ok
end,
{ok, RegMia} = re:compile("[0-9]+Mia"),
{async,
Mia2KFolder1} = leveled_bookie:book_returnfolder(Book2,
{index_query,
"Bucket",
{"idx2_bin",
"2000L",
"2000N~"},
{false,
RegMia}}),
Mia2000Count1 = length(Mia2KFolder1()),
{async,
Mia2KFolder2} = leveled_bookie:book_returnfolder(Book2,
{index_query,
"Bucket",
{"idx2_bin",
"2000Ma",
"2000Mz"},
{true,
undefined}}),
Mia2000Count2 = lists:foldl(fun({Term, _Key}, Acc) ->
case Term of
"2000Mia" ->
Acc + 1;
_ ->
Acc
end end,
0,
Mia2KFolder2()),
ok = case Mia2000Count2 of
Mia2000Count1 ->
ok
end,
ok = leveled_bookie:book_close(Book2),
testutil:reset_filestructure().
count_termsonindex(Bucket, IdxField, Book, QType) ->
lists:foldl(fun(X, Acc) ->
SW = os:timestamp(),
ST = integer_to_list(X),
ET = ST ++ "~",
R = leveled_bookie:book_returnfolder(Book,
{index_query,
Bucket,
{IdxField,
ST,
ET},
QType}),
{async, Folder} = R,
Items = length(Folder()),
io:format("2i query from term ~s on index ~s took " ++
"~w microseconds~n",
[ST,
IdxField,
timer:now_diff(os:timestamp(), SW)]),
Acc + Items
end,
0,
lists:seq(1901, 2218)).
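
The end term is built by appending "~": the tilde (ASCII 126) sorts after
every digit and letter, so ST ++ "~" is an upper bound covering all terms
that begin with ST, and each pass over lists:seq(1901, 2218) counts one
year of the random dates generated by testutil. A small illustration:

    ST = "1901",
    ET = ST ++ "~",
    true = ("1901Mia" > ST) and ("1901Mia" < ET),    % in range for 1901
    true = ("1902Mia" > ET).                         % out of range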

View file

@@ -0,0 +1,232 @@
-module(testutil).
-include("../include/leveled.hrl").
-export([reset_filestructure/0,
check_bucket_stats/2,
check_forlist/2,
check_forlist/3,
check_formissinglist/2,
check_forobject/2,
check_formissingobject/3,
generate_testobject/0,
generate_testobject/5,
generate_compressibleobjects/2,
generate_smallobjects/2,
generate_objects/2,
generate_objects/5,
get_compressiblevalue/0,
get_randomindexes_generator/1,
name_list/0,
load_objects/5]).
reset_filestructure() ->
RootPath = "test",
filelib:ensure_dir(RootPath ++ "/journal/"),
filelib:ensure_dir(RootPath ++ "/ledger/"),
leveled_inker:clean_testdir(RootPath ++ "/journal"),
leveled_penciller:clean_testdir(RootPath ++ "/ledger"),
RootPath.
check_bucket_stats(Bookie, Bucket) ->
FoldSW1 = os:timestamp(),
io:format("Checking bucket size~n"),
{async, Folder1} = leveled_bookie:book_returnfolder(Bookie,
{riakbucket_stats,
Bucket}),
{B1Size, B1Count} = Folder1(),
io:format("Bucket fold completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(), FoldSW1)]),
io:format("Bucket ~s has size ~w and count ~w~n",
[Bucket, B1Size, B1Count]),
{B1Size, B1Count}.
check_forlist(Bookie, ChkList) ->
check_forlist(Bookie, ChkList, false).
check_forlist(Bookie, ChkList, Log) ->
SW = os:timestamp(),
lists:foreach(fun({_RN, Obj, _Spc}) ->
if
Log == true ->
io:format("Fetching Key ~w~n", [Obj#r_object.key]);
true ->
ok
end,
R = leveled_bookie:book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
R = {ok, Obj} end,
ChkList),
io:format("Fetch check took ~w microseconds checking list of length ~w~n",
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
check_formissinglist(Bookie, ChkList) ->
SW = os:timestamp(),
lists:foreach(fun({_RN, Obj, _Spc}) ->
R = leveled_bookie:book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
R = not_found end,
ChkList),
io:format("Miss check took ~w microseconds checking list of length ~w~n",
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
check_forobject(Bookie, TestObject) ->
{ok, TestObject} = leveled_bookie:book_riakget(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
{ok, HeadObject} = leveled_bookie:book_riakhead(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
ok = case {HeadObject#r_object.bucket,
HeadObject#r_object.key,
HeadObject#r_object.vclock} of
{B1, K1, VC1} when B1 == TestObject#r_object.bucket,
K1 == TestObject#r_object.key,
VC1 == TestObject#r_object.vclock ->
ok
end.
check_formissingobject(Bookie, Bucket, Key) ->
not_found = leveled_bookie:book_riakget(Bookie, Bucket, Key),
not_found = leveled_bookie:book_riakhead(Bookie, Bucket, Key).
generate_testobject() ->
{B1, K1, V1, Spec1, MD} = {"Bucket1",
"Key1",
"Value1",
[],
{"MDK1", "MDV1"}},
generate_testobject(B1, K1, V1, Spec1, MD).
generate_testobject(B, K, V, Spec, MD) ->
Content = #r_content{metadata=MD, value=V},
{#r_object{bucket=B, key=K, contents=[Content], vclock=[{'a',1}]},
Spec}.
generate_compressibleobjects(Count, KeyNumber) ->
V = get_compressiblevalue(),
generate_objects(Count, KeyNumber, [], V).
get_compressiblevalue() ->
S1 = "111111111111111",
S2 = "222222222222222",
S3 = "333333333333333",
S4 = "aaaaaaaaaaaaaaa",
S5 = "AAAAAAAAAAAAAAA",
S6 = "GGGGGGGGGGGGGGG",
S7 = "===============",
S8 = "...............",
Selector = [{1, S1}, {2, S2}, {3, S3}, {4, S4},
{5, S5}, {6, S6}, {7, S7}, {8, S8}],
L = lists:seq(1, 1024),
lists:foldl(fun(_X, Acc) ->
{_, Str} = lists:keyfind(random:uniform(8), 1, Selector),
Acc ++ Str end,
"",
L).
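
The value produced is 1024 randomly selected 15-character runs, i.e. 15360
bytes of highly repetitive (and therefore compressible) data:

    V = get_compressiblevalue(),
    15360 = length(V).    % 1024 x 15 characters
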
generate_smallobjects(Count, KeyNumber) ->
generate_objects(Count, KeyNumber, [], crypto:rand_bytes(512)).
generate_objects(Count, KeyNumber) ->
generate_objects(Count, KeyNumber, [], crypto:rand_bytes(4096)).
generate_objects(Count, KeyNumber, ObjL, Value) ->
generate_objects(Count, KeyNumber, ObjL, Value, fun() -> [] end).
generate_objects(0, _KeyNumber, ObjL, _Value, _IndexGen) ->
ObjL;
generate_objects(Count, uuid, ObjL, Value, IndexGen) ->
{Obj1, Spec1} = set_object(leveled_codec:generate_uuid(),
Value,
IndexGen),
generate_objects(Count - 1,
uuid,
ObjL ++ [{random:uniform(), Obj1, Spec1}],
Value,
IndexGen);
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen) ->
{Obj1, Spec1} = set_object("Key" ++ integer_to_list(KeyNumber),
Value,
IndexGen),
generate_objects(Count - 1,
KeyNumber + 1,
ObjL ++ [{random:uniform(), Obj1, Spec1}],
Value,
IndexGen).
set_object(Key, Value, IndexGen) ->
Obj = {"Bucket",
Key,
Value,
IndexGen(),
[{"MDK", "MDV" ++ Key},
{"MDK2", "MDV" ++ Key}]},
{B1, K1, V1, Spec1, MD} = Obj,
Content = #r_content{metadata=MD, value=V1},
{#r_object{bucket=B1, key=K1, contents=[Content], vclock=[{'a',1}]},
Spec1}.
load_objects(ChunkSize, GenList, Bookie, TestObject, Generator) ->
lists:map(fun(KN) ->
ObjListA = Generator(ChunkSize, KN),
StartWatchA = os:timestamp(),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie, Obj, Spc)
end,
ObjListA),
Time = timer:now_diff(os:timestamp(), StartWatchA),
io:format("~w objects loaded in ~w seconds~n",
[ChunkSize, Time/1000000]),
if
TestObject == no_check ->
ok;
true ->
check_forobject(Bookie, TestObject)
end,
lists:sublist(ObjListA, 1000) end,
GenList).
get_randomindexes_generator(Count) ->
Generator = fun() ->
lists:map(fun(X) ->
{add,
"idx" ++ integer_to_list(X) ++ "_bin",
get_randomdate() ++ get_randomname()} end,
lists:seq(1, Count))
end,
Generator.
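
Each call to the returned fun yields one add spec per index; the shape
(values are random, shown purely for illustration) is:

    IndexGen = get_randomindexes_generator(2),
    [{add, "idx1_bin", _DateName1},
        {add, "idx2_bin", _DateName2}] = IndexGen().
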
name_list() ->
[{1, "Sophia"}, {2, "Emma"}, {3, "Olivia"}, {4, "Ava"},
{5, "Isabella"}, {6, "Mia"}, {7, "Zoe"}, {8, "Lily"},
{9, "Emily"}, {10, "Madelyn"}, {11, "Madison"}, {12, "Chloe"},
{13, "Charlotte"}, {14, "Aubrey"}, {15, "Avery"},
{16, "Abigail"}].
get_randomname() ->
NameList = name_list(),
N = random:uniform(16),
{N, Name} = lists:keyfind(N, 1, NameList),
Name.
get_randomdate() ->
LowTime = 60000000000,
HighTime = 70000000000,
RandPoint = LowTime + random:uniform(HighTime - LowTime),
Date = calendar:gregorian_seconds_to_datetime(RandPoint),
{{Year, Month, Day}, {Hour, Minute, Second}} = Date,
lists:flatten(io_lib:format("~4..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
[Year, Month, Day, Hour, Minute, Second])).
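
LowTime and HighTime are Gregorian seconds, placing the random dates in
roughly the years 1901 to 2218; this is why count_termsonindex in
iterator_SUITE sweeps lists:seq(1901, 2218). A quick check:

    {{1901, _, _}, _} = calendar:gregorian_seconds_to_datetime(60000000000),
    {{2218, _, _}, _} = calendar:gregorian_seconds_to_datetime(70000000000).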