Clean API of Riak-specific Methods

Clean the API of Riak-specific methods, and also resolve timing issue in
simple_server unit test.  Previously this would end up with missing data
(and a lower sequence number after start) because of the penciller_clerk
timeout being relatively large in the context of this test.  Now that the
timeout has been reduced, the L0 slot is cleared by the time of the
close.  To make sure, an extra sleep has been added as a precaution to
avoid any intermittent issues.
This commit is contained in:
martinsumner 2016-11-07 10:11:57 +00:00
parent 2d590c3077
commit 4583460328
6 changed files with 124 additions and 103 deletions

View file

@ -134,10 +134,6 @@
code_change/3,
book_start/1,
book_start/3,
book_riakput/3,
book_riakdelete/4,
book_riakget/3,
book_riakhead/3,
book_put/5,
book_put/6,
book_tempput/7,
@ -184,9 +180,6 @@ book_start(RootPath, LedgerCacheSize, JournalSize) ->
book_start(Opts) ->
gen_server:start(?MODULE, [Opts], []).
book_riakput(Pid, RiakObject, IndexSpecs) ->
{Bucket, Key} = leveled_codec:riakto_keydetails(RiakObject),
book_put(Pid, Bucket, Key, RiakObject, IndexSpecs, ?RIAK_TAG).
book_tempput(Pid, Bucket, Key, Object, IndexSpecs, Tag, TTL) when is_integer(TTL) ->
book_put(Pid, Bucket, Key, Object, IndexSpecs, Tag, TTL).
@ -197,21 +190,12 @@ book_put(Pid, Bucket, Key, Object, IndexSpecs) ->
book_put(Pid, Bucket, Key, Object, IndexSpecs, Tag) ->
book_put(Pid, Bucket, Key, Object, IndexSpecs, Tag, infinity).
book_riakdelete(Pid, Bucket, Key, IndexSpecs) ->
book_put(Pid, Bucket, Key, delete, IndexSpecs, ?RIAK_TAG).
book_delete(Pid, Bucket, Key, IndexSpecs) ->
book_put(Pid, Bucket, Key, delete, IndexSpecs, ?STD_TAG).
book_riakget(Pid, Bucket, Key) ->
book_get(Pid, Bucket, Key, ?RIAK_TAG).
book_get(Pid, Bucket, Key) ->
book_get(Pid, Bucket, Key, ?STD_TAG).
book_riakhead(Pid, Bucket, Key) ->
book_head(Pid, Bucket, Key, ?RIAK_TAG).
book_head(Pid, Bucket, Key) ->
book_head(Pid, Bucket, Key, ?STD_TAG).
@ -933,12 +917,12 @@ single_key_test() ->
{"MDK1", "MDV1"}},
Content = #r_content{metadata=MD, value=V1},
Object = #r_object{bucket=B1, key=K1, contents=[Content], vclock=[{'a',1}]},
ok = book_riakput(Bookie1, Object, Spec1),
{ok, F1} = book_riakget(Bookie1, B1, K1),
ok = book_put(Bookie1, B1, K1, Object, Spec1, ?RIAK_TAG),
{ok, F1} = book_get(Bookie1, B1, K1, ?RIAK_TAG),
?assertMatch(F1, Object),
ok = book_close(Bookie1),
{ok, Bookie2} = book_start([{root_path, RootPath}]),
{ok, F2} = book_riakget(Bookie2, B1, K1),
{ok, F2} = book_get(Bookie2, B1, K1, ?RIAK_TAG),
?assertMatch(F2, Object),
ok = book_close(Bookie2),
reset_filestructure().
@ -960,41 +944,53 @@ multi_key_test() ->
{"MDK2", "MDV2"}},
C2 = #r_content{metadata=MD2, value=V2},
Obj2 = #r_object{bucket=B2, key=K2, contents=[C2], vclock=[{'a',1}]},
ok = book_riakput(Bookie1, Obj1, Spec1),
ok = book_put(Bookie1, B1, K1, Obj1, Spec1, ?RIAK_TAG),
ObjL1 = generate_multiple_robjects(100, 3),
SW1 = os:timestamp(),
lists:foreach(fun({O, S}) -> ok = book_riakput(Bookie1, O, S) end, ObjL1),
lists:foreach(fun({O, S}) ->
{B, K} = leveled_codec:riakto_keydetails(O),
ok = book_put(Bookie1, B, K, O, S, ?RIAK_TAG)
end,
ObjL1),
io:format("PUT of 100 objects completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(),SW1)]),
ok = book_riakput(Bookie1, Obj2, Spec2),
{ok, F1A} = book_riakget(Bookie1, B1, K1),
ok = book_put(Bookie1, B2, K2, Obj2, Spec2, ?RIAK_TAG),
{ok, F1A} = book_get(Bookie1, B1, K1, ?RIAK_TAG),
?assertMatch(F1A, Obj1),
{ok, F2A} = book_riakget(Bookie1, B2, K2),
{ok, F2A} = book_get(Bookie1, B2, K2, ?RIAK_TAG),
?assertMatch(F2A, Obj2),
ObjL2 = generate_multiple_robjects(100, 103),
SW2 = os:timestamp(),
lists:foreach(fun({O, S}) -> ok = book_riakput(Bookie1, O, S) end, ObjL2),
lists:foreach(fun({O, S}) ->
{B, K} = leveled_codec:riakto_keydetails(O),
ok = book_put(Bookie1, B, K, O, S, ?RIAK_TAG)
end,
ObjL2),
io:format("PUT of 100 objects completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(),SW2)]),
{ok, F1B} = book_riakget(Bookie1, B1, K1),
{ok, F1B} = book_get(Bookie1, B1, K1, ?RIAK_TAG),
?assertMatch(F1B, Obj1),
{ok, F2B} = book_riakget(Bookie1, B2, K2),
{ok, F2B} = book_get(Bookie1, B2, K2, ?RIAK_TAG),
?assertMatch(F2B, Obj2),
ok = book_close(Bookie1),
% Now reopen the file, and confirm that a fetch is still possible
{ok, Bookie2} = book_start([{root_path, RootPath}]),
{ok, F1C} = book_riakget(Bookie2, B1, K1),
{ok, F1C} = book_get(Bookie2, B1, K1, ?RIAK_TAG),
?assertMatch(F1C, Obj1),
{ok, F2C} = book_riakget(Bookie2, B2, K2),
{ok, F2C} = book_get(Bookie2, B2, K2, ?RIAK_TAG),
?assertMatch(F2C, Obj2),
ObjL3 = generate_multiple_robjects(100, 203),
SW3 = os:timestamp(),
lists:foreach(fun({O, S}) -> ok = book_riakput(Bookie2, O, S) end, ObjL3),
lists:foreach(fun({O, S}) ->
{B, K} = leveled_codec:riakto_keydetails(O),
ok = book_put(Bookie2, B, K, O, S, ?RIAK_TAG)
end,
ObjL3),
io:format("PUT of 100 objects completed in ~w microseconds~n",
[timer:now_diff(os:timestamp(),SW3)]),
{ok, F1D} = book_riakget(Bookie2, B1, K1),
{ok, F1D} = book_get(Bookie2, B1, K1, ?RIAK_TAG),
?assertMatch(F1D, Obj1),
{ok, F2D} = book_riakget(Bookie2, B2, K2),
{ok, F2D} = book_get(Bookie2, B2, K2, ?RIAK_TAG),
?assertMatch(F2D, Obj2),
ok = book_close(Bookie2),
reset_filestructure().

View file

@ -1298,13 +1298,18 @@ simple_server_test() ->
clean_testdir(RootPath),
{ok, PCL} = pcl_start(#penciller_options{root_path=RootPath,
max_inmemory_tablesize=1000}),
Key1 = {{o,"Bucket0001", "Key0001", null}, {1, {active, infinity}, null}},
Key1 = {{o,"Bucket0001", "Key0001", null},
{1, {active, infinity}, null}},
KL1 = leveled_sft:generate_randomkeys({1000, 2}),
Key2 = {{o,"Bucket0002", "Key0002", null}, {1002, {active, infinity}, null}},
KL2 = leveled_sft:generate_randomkeys({1000, 1003}),
Key3 = {{o,"Bucket0003", "Key0003", null}, {2003, {active, infinity}, null}},
Key2 = {{o,"Bucket0002", "Key0002", null},
{1002, {active, infinity}, null}},
KL2 = leveled_sft:generate_randomkeys({900, 1003}),
% Keep below the max table size by having 900 not 1000
Key3 = {{o,"Bucket0003", "Key0003", null},
{2003, {active, infinity}, null}},
KL3 = leveled_sft:generate_randomkeys({1000, 2004}),
Key4 = {{o,"Bucket0004", "Key0004", null}, {3004, {active, infinity}, null}},
Key4 = {{o,"Bucket0004", "Key0004", null},
{3004, {active, infinity}, null}},
KL4 = leveled_sft:generate_randomkeys({1000, 3005}),
ok = maybe_pause_push(PCL, [Key1]),
?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
@ -1321,13 +1326,15 @@ simple_server_test() ->
?assertMatch(Key1, pcl_fetch(PCL, {o,"Bucket0001", "Key0001", null})),
?assertMatch(Key2, pcl_fetch(PCL, {o,"Bucket0002", "Key0002", null})),
?assertMatch(Key3, pcl_fetch(PCL, {o,"Bucket0003", "Key0003", null})),
timer:sleep(200),
% This sleep should make sure that the merge to L1 has occurred
% This will free up the L0 slot for the remainder to be written in shutdown
ok = pcl_close(PCL),
{ok, PCLr} = pcl_start(#penciller_options{root_path=RootPath,
max_inmemory_tablesize=1000}),
?assertMatch(1001, pcl_getstartupsequencenumber(PCLr)),
ok = maybe_pause_push(PCLr, [Key2] ++ KL2 ++ [Key3]),
io:format("Back to starting position with lost data recovered~n"),
?assertMatch(2003, pcl_getstartupsequencenumber(PCLr)),
% ok = maybe_pause_push(PCLr, [Key2] ++ KL2 ++ [Key3]),
?assertMatch(Key1, pcl_fetch(PCLr, {o,"Bucket0001", "Key0001", null})),
?assertMatch(Key2, pcl_fetch(PCLr, {o,"Bucket0002", "Key0002", null})),

View file

@ -27,7 +27,7 @@ simple_put_fetch_head_delete(_Config) ->
StartOpts1 = [{root_path, RootPath}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
ok = leveled_bookie:book_close(Bookie1),
@ -37,7 +37,7 @@ simple_put_fetch_head_delete(_Config) ->
testutil:check_forobject(Bookie2, TestObject),
ObjList1 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
testutil:book_riakput(Bookie2, Obj, Spc) end,
ObjList1),
ChkList1 = lists:sublist(lists:sort(ObjList1), 100),
testutil:check_forlist(Bookie2, ChkList1),
@ -72,7 +72,7 @@ many_put_fetch_head(_Config) ->
StartOpts1 = [{root_path, RootPath}, {max_pencillercachesize, 16000}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
ok = leveled_bookie:book_close(Bookie1),
StartOpts2 = [{root_path, RootPath},
@ -89,7 +89,7 @@ many_put_fetch_head(_Config) ->
testutil:check_forlist(Bookie2, CL1A),
ObjList2A = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
testutil:book_riakput(Bookie2, Obj, Spc) end,
ObjList2A),
ChkList2A = lists:sublist(lists:sort(ObjList2A), 1000),
testutil:check_forlist(Bookie2, ChkList2A),
@ -113,11 +113,11 @@ journal_compaction(_Config) ->
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
ok = leveled_bookie:book_compactjournal(Bookie1, 30000),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
ObjList1 = testutil:generate_objects(20000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjList1),
ChkList1 = lists:sublist(lists:sort(ObjList1), 10000),
testutil:check_forlist(Bookie1, ChkList1),
@ -129,7 +129,7 @@ journal_compaction(_Config) ->
{"MDK1", "MDV1"}},
{TestObject2, TestSpec2} = testutil:generate_testobject(B2, K2,
V2, Spec2, MD),
ok = leveled_bookie:book_riakput(Bookie1, TestObject2, TestSpec2),
ok = testutil:book_riakput(Bookie1, TestObject2, TestSpec2),
ok = leveled_bookie:book_compactjournal(Bookie1, 30000),
testutil:check_forlist(Bookie1, ChkList1),
testutil:check_forobject(Bookie1, TestObject),
@ -140,7 +140,7 @@ journal_compaction(_Config) ->
%% Delete some of the objects
ObjListD = testutil:generate_objects(10000, 2),
lists:foreach(fun({_R, O, _S}) ->
ok = leveled_bookie:book_riakdelete(Bookie1,
ok = testutil:book_riakdelete(Bookie1,
O#r_object.bucket,
O#r_object.key,
[])
@ -150,7 +150,7 @@ journal_compaction(_Config) ->
%% Now replace all the other objects
ObjList2 = testutil:generate_objects(40000, 10002),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjList2),
ok = leveled_bookie:book_compactjournal(Bookie1, 30000),
@ -184,10 +184,10 @@ fetchput_snapshot(_Config) ->
StartOpts1 = [{root_path, RootPath}, {max_journalsize, 30000000}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
ObjList1 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjList1),
SnapOpts1 = [{snapshot_bookie, Bookie1}],
{ok, SnapBookie1} = leveled_bookie:book_start(SnapOpts1),
@ -212,7 +212,7 @@ fetchput_snapshot(_Config) ->
ObjList2 = testutil:generate_objects(5000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
testutil:book_riakput(Bookie2, Obj, Spc) end,
ObjList2),
io:format("Replacement objects put~n"),
@ -226,7 +226,7 @@ fetchput_snapshot(_Config) ->
{ok, FNsA} = file:list_dir(RootPath ++ "/ledger/ledger_files"),
ObjList3 = testutil:generate_objects(15000, 5002),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie2, Obj, Spc) end,
testutil:book_riakput(Bookie2, Obj, Spc) end,
ObjList3),
ChkList3 = lists:sublist(lists:sort(ObjList3), 100),
testutil:check_forlist(Bookie2, ChkList3),
@ -291,7 +291,7 @@ load_and_count(_Config) ->
StartOpts1 = [{root_path, RootPath}, {max_journalsize, 50000000}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading initial small objects~n"),
G1 = fun testutil:generate_smallobjects/2,
@ -374,7 +374,7 @@ load_and_count_withdelete(_Config) ->
StartOpts1 = [{root_path, RootPath}, {max_journalsize, 50000000}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
io:format("Loading initial small objects~n"),
G1 = fun testutil:generate_smallobjects/2,
@ -396,8 +396,8 @@ load_and_count_withdelete(_Config) ->
testutil:check_forobject(Bookie1, TestObject),
{BucketD, KeyD} = leveled_codec:riakto_keydetails(TestObject),
{_, 1} = testutil:check_bucket_stats(Bookie1, BucketD),
ok = leveled_bookie:book_riakdelete(Bookie1, BucketD, KeyD, []),
not_found = leveled_bookie:book_riakget(Bookie1, BucketD, KeyD),
ok = testutil:book_riakdelete(Bookie1, BucketD, KeyD, []),
not_found = testutil:book_riakget(Bookie1, BucketD, KeyD),
{_, 0} = testutil:check_bucket_stats(Bookie1, BucketD),
io:format("Loading larger compressible objects~n"),
G2 = fun testutil:generate_compressibleobjects/2,
@ -416,7 +416,7 @@ load_and_count_withdelete(_Config) ->
Acc + 5000 end,
100000,
lists:seq(1, 20)),
not_found = leveled_bookie:book_riakget(Bookie1, BucketD, KeyD),
not_found = testutil:book_riakget(Bookie1, BucketD, KeyD),
ok = leveled_bookie:book_close(Bookie1),
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
testutil:check_formissingobject(Bookie2, BucketD, KeyD),
@ -461,7 +461,7 @@ space_clear_ondelete(_Config) ->
% Delete the keys
SW2 = os:timestamp(),
lists:foreach(fun({Bucket, Key}) ->
ok = leveled_bookie:book_riakdelete(Book1,
ok = testutil:book_riakdelete(Book1,
Bucket,
Key,
[])

View file

@ -24,7 +24,7 @@ small_load_with2i(_Config) ->
% low journal size to make sure > 1 created
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
testutil:check_forobject(Bookie1, TestObject),
@ -36,7 +36,7 @@ small_load_with2i(_Config) ->
ObjectGen,
IndexGen),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjL1),
ChkList1 = lists:sublist(lists:sort(ObjL1), 100),
testutil:check_forlist(Bookie1, ChkList1),
@ -48,7 +48,7 @@ small_load_with2i(_Config) ->
end,
Spc),
{B, K} = leveled_codec:riakto_keydetails(Obj),
leveled_bookie:book_riakdelete(Bookie1, B, K, DSpc)
testutil:book_riakdelete(Bookie1, B, K, DSpc)
end,
ChkList1),
%% Get the Buckets Keys and Hashes for the whole bucket
@ -116,7 +116,7 @@ query_count(_Config) ->
"Value1",
[],
{"MDK1", "MDV1"}),
ok = leveled_bookie:book_riakput(Book1, TestObject, TestSpec),
ok = testutil:book_riakput(Book1, TestObject, TestSpec),
testutil:check_forobject(Book1, TestObject),
testutil:check_formissingobject(Book1, "Bucket1", "Key2"),
testutil:check_forobject(Book1, TestObject),
@ -130,7 +130,7 @@ query_count(_Config) ->
V,
Indexes),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Book1,
testutil:book_riakput(Book1,
Obj,
Spc)
end,
@ -243,7 +243,7 @@ query_count(_Config) ->
V9 = testutil:get_compressiblevalue(),
Indexes9 = testutil:get_randomindexes_generator(8),
[{_RN, Obj9, Spc9}] = testutil:generate_objects(1, uuid, [], V9, Indexes9),
ok = leveled_bookie:book_riakput(Book2, Obj9, Spc9),
ok = testutil:book_riakput(Book2, Obj9, Spc9),
R9 = lists:map(fun({add, IdxF, IdxT}) ->
R = leveled_bookie:book_returnfolder(Book2,
{index_query,
@ -261,7 +261,7 @@ query_count(_Config) ->
Spc9),
Spc9Del = lists:map(fun({add, IdxF, IdxT}) -> {remove, IdxF, IdxT} end,
Spc9),
ok = leveled_bookie:book_riakput(Book2, Obj9, Spc9Del),
ok = testutil:book_riakput(Book2, Obj9, Spc9Del),
lists:foreach(fun({IdxF, IdxT, X}) ->
R = leveled_bookie:book_returnfolder(Book2,
{index_query,
@ -294,7 +294,7 @@ query_count(_Config) ->
end
end,
R9),
ok = leveled_bookie:book_riakput(Book3, Obj9, Spc9),
ok = testutil:book_riakput(Book3, Obj9, Spc9),
ok = leveled_bookie:book_close(Book3),
{ok, Book4} = leveled_bookie:book_start(RootPath, 2000, 50000000),
lists:foreach(fun({IdxF, IdxT, X}) ->

View file

@ -47,7 +47,7 @@ aae_bustedjournal(_Config) ->
{max_journalsize, 20000000}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
GenList = [2],
_CLs = testutil:load_objects(20000, GenList, Bookie1, TestObject,
@ -64,7 +64,7 @@ aae_bustedjournal(_Config) ->
KeyList = KeyF(),
20001 = length(KeyList),
HeadCount = lists:foldl(fun({B, K}, Acc) ->
case leveled_bookie:book_riakhead(Bookie2,
case testutil:book_riakhead(Bookie2,
B,
K) of
{ok, _} -> Acc + 1;
@ -75,7 +75,7 @@ aae_bustedjournal(_Config) ->
KeyList),
20001 = HeadCount,
GetCount = lists:foldl(fun({B, K}, Acc) ->
case leveled_bookie:book_riakget(Bookie2,
case testutil:book_riakget(Bookie2,
B,
K) of
{ok, _} -> Acc + 1;
@ -199,16 +199,16 @@ journal_compaction_bustedjournal(_Config) ->
{max_run_length, 10}],
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
{TestObject, TestSpec} = testutil:generate_testobject(),
ok = leveled_bookie:book_riakput(Bookie1, TestObject, TestSpec),
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
testutil:check_forobject(Bookie1, TestObject),
ObjList1 = testutil:generate_objects(50000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjList1),
%% Now replace all the objects
ObjList2 = testutil:generate_objects(50000, 2),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie1, Obj, Spc) end,
testutil:book_riakput(Bookie1, Obj, Spc) end,
ObjList2),
ok = leveled_bookie:book_close(Bookie1),

View file

@ -2,7 +2,11 @@
-include("../include/leveled.hrl").
-export([reset_filestructure/0,
-export([book_riakput/3,
book_riakdelete/4,
book_riakget/3,
book_riakhead/3,
reset_filestructure/0,
reset_filestructure/1,
check_bucket_stats/2,
check_forlist/2,
@ -39,6 +43,24 @@
-define(RETURN_TERMS, {true, undefined}).
book_riakput(Pid, RiakObject, IndexSpecs) ->
{Bucket, Key} = leveled_codec:riakto_keydetails(RiakObject),
leveled_book:book_put(Pid, Bucket, Key, RiakObject, IndexSpecs, ?RIAK_TAG).
book_riakdelete(Pid, Bucket, Key, IndexSpecs) ->
leveled_book:book_put(Pid, Bucket, Key, delete, IndexSpecs, ?RIAK_TAG).
book_riakget(Pid, Bucket, Key) ->
leveled_book:book_get(Pid, Bucket, Key, ?RIAK_TAG).
book_riakhead(Pid, Bucket, Key) ->
leveled_book:book_head(Pid, Bucket, Key, ?RIAK_TAG).
reset_filestructure() ->
reset_filestructure(0).
@ -81,7 +103,7 @@ check_forlist(Bookie, ChkList, Log) ->
true ->
ok
end,
R = leveled_bookie:book_riakget(Bookie,
R = book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
ok = case R of
@ -100,7 +122,7 @@ check_forlist(Bookie, ChkList, Log) ->
check_formissinglist(Bookie, ChkList) ->
SW = os:timestamp(),
lists:foreach(fun({_RN, Obj, _Spc}) ->
R = leveled_bookie:book_riakget(Bookie,
R = book_riakget(Bookie,
Obj#r_object.bucket,
Obj#r_object.key),
R = not_found end,
@ -109,10 +131,10 @@ check_formissinglist(Bookie, ChkList) ->
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
check_forobject(Bookie, TestObject) ->
{ok, TestObject} = leveled_bookie:book_riakget(Bookie,
{ok, TestObject} = book_riakget(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
{ok, HeadObject} = leveled_bookie:book_riakhead(Bookie,
{ok, HeadObject} = book_riakhead(Bookie,
TestObject#r_object.bucket,
TestObject#r_object.key),
ok = case {HeadObject#r_object.bucket,
@ -125,8 +147,8 @@ check_forobject(Bookie, TestObject) ->
end.
check_formissingobject(Bookie, Bucket, Key) ->
not_found = leveled_bookie:book_riakget(Bookie, Bucket, Key),
not_found = leveled_bookie:book_riakhead(Bookie, Bucket, Key).
not_found = book_riakget(Bookie, Bucket, Key),
not_found = book_riakhead(Bookie, Bucket, Key).
generate_testobject() ->
@ -236,7 +258,7 @@ load_objects(ChunkSize, GenList, Bookie, TestObject, Generator) ->
ObjListA = Generator(ChunkSize, KN),
StartWatchA = os:timestamp(),
lists:foreach(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Bookie, Obj, Spc)
book_riakput(Bookie, Obj, Spc)
end,
ObjListA),
Time = timer:now_diff(os:timestamp(), StartWatchA),
@ -289,7 +311,7 @@ check_indexed_objects(Book, B, KSpecL, V) ->
% Check all objects match, return what should be the results of an all
% index query
IdxR = lists:map(fun({K, Spc}) ->
{ok, O} = leveled_bookie:book_riakget(Book, B, K),
{ok, O} = book_riakget(Book, B, K),
V = testutil:get_value(O),
{add,
"idx1_bin",
@ -333,9 +355,7 @@ put_indexed_objects(Book, Bucket, Count) ->
IndexGen,
Bucket),
KSpecL = lists:map(fun({_RN, Obj, Spc}) ->
leveled_bookie:book_riakput(Book,
Obj,
Spc),
book_riakput(Book, Obj, Spc),
{testutil:get_key(Obj), Spc}
end,
ObjL1),
@ -364,9 +384,7 @@ put_altered_indexed_objects(Book, Bucket, KSpecL, RemoveOld2i) ->
V,
IndexGen,
AddSpc),
ok = leveled_bookie:book_riakput(Book,
O,
AltSpc),
ok = book_riakput(Book, O, AltSpc),
{K, AltSpc} end,
KSpecL),
{RplKSpecL, V}.