Mas d34 i453 eqwalizer (#454)
* Add eqwalizer and clear for codec & sst The eqwalizer errors highlighted the need in several places for type clarification. Within tests there are some issues where a type is assumed, and so ignore has been used to handle this rather than write more complex code to be explicit about the assumption. The handling of arrays isn't great by eqwalizer - to be specific about the content of an array causes issues when initialising an array. Perhaps a type (map maybe) where one can be more explicit about types might be a better option (even if there is a minimal performance impact). The use of a ?TOMB_COUNT defined option complicated the code much more with eqwalizer. So for now, there is no developer option to disable ?TOMB_COUNT. Test fixes required where strings have been used for buckets/keys not binaries. The leveled_sst statem needs a different state record for starting when compared to other modes. The state record has been divided up to reflect this, to make type management easier. The impact on performance needs to be tested. * Update ct tests to support binary keys/buckets only * Eqwalizer for leveled_cdb and leveled_tictac As array is used in leveled_tictac - there is the same issue as with leveled_sst * Remove redundant indirection of leveled_rand A legacy of pre-20 OTP * More modules eqwalized ebloom/log/util/monitor * Eqwalize further modules elp eqwalize leveled_codec; elp eqwalize leveled_sst; elp eqwalize leveled_cdb; elp eqwalize leveled_tictac; elp eqwalize leveled_log; elp eqwalize leveled_monitor; elp eqwalize leveled_head; elp eqwalize leveled_ebloom; elp eqwalize leveled_iclerk All concurrently OK * Refactor unit tests to use binary() not string() in key Previously string() was allowed just to avoid having to change all these tests. Go through the pain now, as part of eqwalizing. * Add fixes for penciller, inker Add a new ?IS_DEF macro to replace =/= undefined. 
Now more explicit about primary, object and query keys * Further fixes Need to clarify functions used by runner - where keys, query keys and object keys are used * Further eqwalization * Eqwalize leveled_pmanifest Also make implementation independent of choice of dict - i.e. one can save a manifest using dict for blooms/pending_deletions and then open a manifest with code that uses a different type. Allow for slow dict to be replaced with map. Would not be backwards compatible though, without further thought - i.e. if you upgrade then downgrade. Redundant code created by leveled_sst refactoring removed. * Fix backwards compatibility issues * Manifest Entry to belong to leveled_pmanifest There are two manifests - leveled_pmanifest and leveled_imanifest. Both have manifest_entry() type objects, but these types are different. To avoid confusion don't include the pmanifest manifest_entry() within the global include file - be specific that it belongs to the leveled_pmanifest module * Ignore elp file - large binary * Update src/leveled_pmem.erl Remove unnecessary empty list from type definition Co-authored-by: Thomas Arts <thomas.arts@quviq.com> --------- Co-authored-by: Thomas Arts <thomas.arts@quviq.com>
This commit is contained in:
parent
1be55fcd15
commit
aaeac7ba36
30 changed files with 4778 additions and 3334 deletions
|
@ -73,7 +73,7 @@ application_defined_tag_tester(KeyCount, Tag, Functions, ExpectMD) ->
|
|||
[{bespoke_tag1, retain}, {bespoke_tag2, retain}]},
|
||||
{override_functions, Functions}],
|
||||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
Value = leveled_rand:rand_bytes(512),
|
||||
Value = crypto:strong_rand_bytes(512),
|
||||
MapFun =
|
||||
fun(C) ->
|
||||
{C, object_generator(C, Value)}
|
||||
|
@ -119,7 +119,7 @@ application_defined_tag_tester(KeyCount, Tag, Functions, ExpectMD) ->
|
|||
|
||||
object_generator(Count, V) ->
|
||||
Hash = erlang:phash2({count, V}),
|
||||
Random = leveled_rand:uniform(1000),
|
||||
Random = rand:uniform(1000),
|
||||
Key = list_to_binary(leveled_util:generate_uuid()),
|
||||
Bucket = <<"B">>,
|
||||
{Bucket,
|
||||
|
|
|
@ -55,15 +55,19 @@ simple_put_fetch_head_delete(_Config) ->
|
|||
|
||||
simple_test_withlog(LogLevel, ForcedLogs) ->
|
||||
RootPath = testutil:reset_filestructure(),
|
||||
StartOpts1 = [{root_path, RootPath},
|
||||
{sync_strategy, testutil:sync_strategy()},
|
||||
{log_level, LogLevel},
|
||||
{forced_logs, ForcedLogs}],
|
||||
StartOpts1 =
|
||||
[
|
||||
{root_path, RootPath},
|
||||
{sync_strategy, testutil:sync_strategy()},
|
||||
{log_level, LogLevel},
|
||||
{forced_logs, ForcedLogs},
|
||||
{max_pencillercachesize, 200}
|
||||
],
|
||||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
{TestObject, TestSpec} = testutil:generate_testobject(),
|
||||
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
|
||||
testutil:check_formissingobject(Bookie1, <<"Bucket1">>, <<"Key2">>),
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
StartOpts2 = [{root_path, RootPath},
|
||||
{max_journalsize, 3000000},
|
||||
|
@ -78,29 +82,49 @@ simple_test_withlog(LogLevel, ForcedLogs) ->
|
|||
ChkList1 = lists:sublist(lists:sort(ObjList1), 100),
|
||||
testutil:check_forlist(Bookie2, ChkList1),
|
||||
testutil:check_forobject(Bookie2, TestObject),
|
||||
testutil:check_formissingobject(Bookie2, "Bucket1", "Key2"),
|
||||
ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", "Value2",
|
||||
[{add, "Index1", "Term1"}]),
|
||||
{ok, "Value2"} = leveled_bookie:book_get(Bookie2, "Bucket1", "Key2"),
|
||||
{ok, {62888926, S, undefined}} =
|
||||
leveled_bookie:book_head(Bookie2, "Bucket1", "Key2"),
|
||||
true = (S == 58) or (S == 60),
|
||||
testutil:check_formissingobject(Bookie2, <<"Bucket1">>, <<"Key2">>),
|
||||
ok =
|
||||
leveled_bookie:book_put(
|
||||
Bookie2,
|
||||
<<"Bucket1">>,
|
||||
<<"Key2">>,
|
||||
<<"Value2">>,
|
||||
[{add, <<"Index1">>, <<"Term1">>}]
|
||||
),
|
||||
{ok, <<"Value2">>} =
|
||||
leveled_bookie:book_get(Bookie2, <<"Bucket1">>, <<"Key2">>),
|
||||
{ok, {2220864, S, undefined}} =
|
||||
leveled_bookie:book_head(Bookie2, <<"Bucket1">>, <<"Key2">>),
|
||||
true = (S == 63) or (S == 65),
|
||||
% After OTP 26 the object is 58 bytes not 60
|
||||
testutil:check_formissingobject(Bookie2, "Bucket1", "Key2"),
|
||||
ok = leveled_bookie:book_put(Bookie2, "Bucket1", "Key2", <<"Value2">>,
|
||||
[{remove, "Index1", "Term1"},
|
||||
{add, "Index1", <<"Term2">>}]),
|
||||
{ok, <<"Value2">>} = leveled_bookie:book_get(Bookie2, "Bucket1", "Key2"),
|
||||
testutil:check_formissingobject(Bookie2, <<"Bucket1">>, <<"Key2">>),
|
||||
ok =
|
||||
leveled_bookie:book_put(
|
||||
Bookie2,
|
||||
<<"Bucket1">>,
|
||||
<<"Key2">>,
|
||||
<<"Value2">>,
|
||||
[{remove, <<"Index1">>, <<"Term1">>},
|
||||
{add, <<"Index1">>, <<"Term2">>}]
|
||||
),
|
||||
{ok, <<"Value2">>} =
|
||||
leveled_bookie:book_get(Bookie2, <<"Bucket1">>, <<"Key2">>),
|
||||
ok = leveled_bookie:book_close(Bookie2),
|
||||
{ok, Bookie3} = leveled_bookie:book_start(StartOpts2),
|
||||
{ok, <<"Value2">>} = leveled_bookie:book_get(Bookie3, "Bucket1", "Key2"),
|
||||
ok = leveled_bookie:book_delete(Bookie3, "Bucket1", "Key2",
|
||||
[{remove, "Index1", "Term1"}]),
|
||||
not_found = leveled_bookie:book_get(Bookie3, "Bucket1", "Key2"),
|
||||
not_found = leveled_bookie:book_head(Bookie3, "Bucket1", "Key2"),
|
||||
{ok, <<"Value2">>} =
|
||||
leveled_bookie:book_get(Bookie3, <<"Bucket1">>, <<"Key2">>),
|
||||
ok =
|
||||
leveled_bookie:book_delete(
|
||||
Bookie3,
|
||||
<<"Bucket1">>,
|
||||
<<"Key2">>,
|
||||
[{remove, <<"Index1">>, <<"Term1">>}]
|
||||
),
|
||||
not_found = leveled_bookie:book_get(Bookie3, <<"Bucket1">>, <<"Key2">>),
|
||||
not_found = leveled_bookie:book_head(Bookie3, <<"Bucket1">>, <<"Key2">>),
|
||||
ok = leveled_bookie:book_close(Bookie3),
|
||||
{ok, Bookie4} = leveled_bookie:book_start(StartOpts2),
|
||||
not_found = leveled_bookie:book_get(Bookie4, "Bucket1", "Key2"),
|
||||
not_found = leveled_bookie:book_get(Bookie4, <<"Bucket1">>, <<"Key2">>),
|
||||
ok = leveled_bookie:book_destroy(Bookie4).
|
||||
|
||||
many_put_fetch_head(_Config) ->
|
||||
|
@ -168,7 +192,7 @@ many_put_fetch_head(_Config) ->
|
|||
not_found = leveled_bookie:book_sqn(Bookie3,
|
||||
testutil:get_bucket(TestObject),
|
||||
testutil:get_key(TestObject)),
|
||||
testutil:check_formissingobject(Bookie3, "Bookie1", "MissingKey0123"),
|
||||
testutil:check_formissingobject(Bookie3, <<"Bookie1">>, <<"MissingKey0123">>),
|
||||
ok = leveled_bookie:book_destroy(Bookie3).
|
||||
|
||||
bigjournal_littlejournal(_Config) ->
|
||||
|
@ -181,7 +205,7 @@ bigjournal_littlejournal(_Config) ->
|
|||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
ObjL1 =
|
||||
testutil:generate_objects(100, 1, [],
|
||||
leveled_rand:rand_bytes(10000),
|
||||
crypto:strong_rand_bytes(10000),
|
||||
fun() -> [] end, <<"B">>),
|
||||
testutil:riakload(Bookie1, ObjL1),
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
|
@ -189,7 +213,7 @@ bigjournal_littlejournal(_Config) ->
|
|||
{ok, Bookie2} = leveled_bookie:book_start(StartOpts2),
|
||||
ObjL2 =
|
||||
testutil:generate_objects(10, 1000, [],
|
||||
leveled_rand:rand_bytes(10000),
|
||||
crypto:strong_rand_bytes(10000),
|
||||
fun() -> [] end, <<"B">>),
|
||||
testutil:riakload(Bookie2, ObjL2),
|
||||
testutil:check_forlist(Bookie2, ObjL1),
|
||||
|
@ -214,7 +238,7 @@ bigsst_littlesst(_Config) ->
|
|||
100000,
|
||||
1,
|
||||
[],
|
||||
leveled_rand:rand_bytes(100),
|
||||
crypto:strong_rand_bytes(100),
|
||||
fun() -> [] end,
|
||||
<<"B">>)
|
||||
),
|
||||
|
@ -260,13 +284,16 @@ journal_compaction_tester(Restart, WRP) ->
|
|||
ChkList1 = lists:sublist(lists:sort(ObjList1), 10000),
|
||||
testutil:check_forlist(Bookie0, ChkList1),
|
||||
testutil:check_forobject(Bookie0, TestObject),
|
||||
{B2, K2, V2, Spec2, MD} = {"Bucket2",
|
||||
"Key2",
|
||||
"Value2",
|
||||
[],
|
||||
[{"MDK2", "MDV2"}]},
|
||||
{TestObject2, TestSpec2} = testutil:generate_testobject(B2, K2,
|
||||
V2, Spec2, MD),
|
||||
{B2, K2, V2, Spec2, MD} =
|
||||
{
|
||||
<<"Bucket2">>,
|
||||
<<"Key2">>,
|
||||
<<"Value2">>,
|
||||
[],
|
||||
[{<<"MDK2">>, <<"MDV2">>}]
|
||||
},
|
||||
{TestObject2, TestSpec2} =
|
||||
testutil:generate_testobject(B2, K2, V2, Spec2, MD),
|
||||
ok = testutil:book_riakput(Bookie0, TestObject2, TestSpec2),
|
||||
ok = leveled_bookie:book_compactjournal(Bookie0, 30000),
|
||||
testutil:check_forlist(Bookie0, ChkList1),
|
||||
|
@ -277,13 +304,15 @@ journal_compaction_tester(Restart, WRP) ->
|
|||
testutil:check_forobject(Bookie0, TestObject2),
|
||||
%% Delete some of the objects
|
||||
ObjListD = testutil:generate_objects(10000, 2),
|
||||
lists:foreach(fun({_R, O, _S}) ->
|
||||
testutil:book_riakdelete(Bookie0,
|
||||
testutil:get_bucket(O),
|
||||
testutil:get_key(O),
|
||||
[])
|
||||
end,
|
||||
ObjListD),
|
||||
lists:foreach(
|
||||
fun({_R, O, _S}) ->
|
||||
testutil:book_riakdelete(Bookie0,
|
||||
testutil:get_bucket(O),
|
||||
testutil:get_key(O),
|
||||
[])
|
||||
end,
|
||||
ObjListD
|
||||
),
|
||||
|
||||
%% Now replace all the other objects
|
||||
ObjList2 = testutil:generate_objects(40000, 10002),
|
||||
|
@ -539,11 +568,11 @@ fetchput_snapshot(_Config) ->
|
|||
% smaller due to replacements and files deleting
|
||||
% This is dependent on the sleep though (yuk)
|
||||
|
||||
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, "Bucket1"),
|
||||
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, <<"Bucket1">>),
|
||||
true = B1Size > 0,
|
||||
true = B1Count == 1,
|
||||
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, "Bucket1"),
|
||||
{BSize, BCount} = testutil:check_bucket_stats(Bookie2, "Bucket"),
|
||||
{B1Size, B1Count} = testutil:check_bucket_stats(Bookie2, <<"Bucket1">>),
|
||||
{BSize, BCount} = testutil:check_bucket_stats(Bookie2, <<"Bucket">>),
|
||||
true = BSize > 0,
|
||||
true = BCount == 180000,
|
||||
|
||||
|
@ -622,82 +651,78 @@ load_and_count(JournalSize, BookiesMemSize, PencillerMemSize) ->
|
|||
testutil:check_forobject(Bookie1, TestObject),
|
||||
io:format("Loading initial small objects~n"),
|
||||
G1 = fun testutil:generate_smallobjects/2,
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
TestObject,
|
||||
G1),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, "Bucket"),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, TestObject, G1),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
io:format("Loading larger compressible objects~n"),
|
||||
G2 = fun testutil:generate_compressibleobjects/2,
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
TestObject,
|
||||
G2),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, "Bucket"),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
100000,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, TestObject, G2),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
100000,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
io:format("Replacing small objects~n"),
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
TestObject,
|
||||
G1),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, "Bucket"),
|
||||
if
|
||||
Count == 200000 ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, TestObject, G1),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Count == 200000 ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
io:format("Loading more small objects~n"),
|
||||
io:format("Now with unused snapshot so deletions are blocked~n"),
|
||||
{ok, PclClone, null} =
|
||||
leveled_bookie:book_snapshot(Bookie1, ledger, undefined, true),
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
TestObject,
|
||||
G2),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, "Bucket"),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
200000,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, TestObject, G2),
|
||||
{_S, Count} =
|
||||
testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
200000,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
ok = leveled_penciller:pcl_close(PclClone),
|
||||
{_S, 300000} = testutil:check_bucket_stats(Bookie1, "Bucket"),
|
||||
{_S, 300000} = testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
|
||||
{_, 300000} = testutil:check_bucket_stats(Bookie2, "Bucket"),
|
||||
{_, 300000} = testutil:check_bucket_stats(Bookie2, <<"Bucket">>),
|
||||
|
||||
ok = leveled_bookie:book_close(Bookie2),
|
||||
|
||||
|
@ -722,21 +747,19 @@ load_and_count_withdelete(_Config) ->
|
|||
testutil:check_forobject(Bookie1, TestObject),
|
||||
io:format("Loading initial small objects~n"),
|
||||
G1 = fun testutil:generate_smallobjects/2,
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
TestObject,
|
||||
G1),
|
||||
{_S, Count} = testutil:check_bucket_stats(Bookie1,
|
||||
"Bucket"),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, TestObject, G1),
|
||||
{_S, Count} = testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
0,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
{BucketD, KeyD} =
|
||||
{testutil:get_bucket(TestObject), testutil:get_key(TestObject)},
|
||||
|
@ -746,21 +769,19 @@ load_and_count_withdelete(_Config) ->
|
|||
{_, 0} = testutil:check_bucket_stats(Bookie1, BucketD),
|
||||
io:format("Loading larger compressible objects~n"),
|
||||
G2 = fun testutil:generate_compressibleobjects/2,
|
||||
lists:foldl(fun(_X, Acc) ->
|
||||
testutil:load_objects(5000,
|
||||
[Acc + 2],
|
||||
Bookie1,
|
||||
no_check,
|
||||
G2),
|
||||
{_S, Count} = testutil:check_bucket_stats(Bookie1,
|
||||
"Bucket"),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
100000,
|
||||
lists:seq(1, 20)),
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
testutil:load_objects(
|
||||
5000, [Acc + 2], Bookie1, no_check, G2),
|
||||
{_S, Count} = testutil:check_bucket_stats(Bookie1, <<"Bucket">>),
|
||||
if
|
||||
Acc + 5000 == Count ->
|
||||
ok
|
||||
end,
|
||||
Acc + 5000 end,
|
||||
100000,
|
||||
lists:seq(1, 20)
|
||||
),
|
||||
not_found = testutil:book_riakget(Bookie1, BucketD, KeyD),
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
|
||||
|
@ -780,11 +801,8 @@ space_clear_ondelete(_Config) ->
|
|||
{sync_strategy, testutil:sync_strategy()}],
|
||||
{ok, Book1} = leveled_bookie:book_start(StartOpts1),
|
||||
G2 = fun testutil:generate_compressibleobjects/2,
|
||||
testutil:load_objects(20000,
|
||||
[uuid, uuid, uuid, uuid],
|
||||
Book1,
|
||||
no_check,
|
||||
G2),
|
||||
testutil:load_objects(
|
||||
20000, [uuid, uuid, uuid, uuid], Book1, no_check, G2),
|
||||
|
||||
FoldKeysFun = fun(B, K, Acc) -> [{B, K}|Acc] end,
|
||||
|
||||
|
@ -808,10 +826,9 @@ space_clear_ondelete(_Config) ->
|
|||
FoldObjectsFun = fun(B, K, ObjBin, Acc) ->
|
||||
[{B, K, erlang:phash2(ObjBin)}|Acc] end,
|
||||
|
||||
{async, HTreeF1} = leveled_bookie:book_objectfold(Book1,
|
||||
?RIAK_TAG,
|
||||
{FoldObjectsFun, []},
|
||||
false),
|
||||
{async, HTreeF1} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Book1, ?RIAK_TAG, {FoldObjectsFun, []}, false),
|
||||
|
||||
% This query does not Snap PreFold - and so will not prevent
|
||||
% pending deletes from prompting actual deletes
|
||||
|
@ -822,32 +839,34 @@ space_clear_ondelete(_Config) ->
|
|||
|
||||
% Delete the keys
|
||||
SW2 = os:timestamp(),
|
||||
lists:foreach(fun({Bucket, Key}) ->
|
||||
testutil:book_riakdelete(Book1,
|
||||
Bucket,
|
||||
Key,
|
||||
[])
|
||||
end,
|
||||
KL1),
|
||||
io:format("Deletion took ~w microseconds for 80K keys~n",
|
||||
[timer:now_diff(os:timestamp(), SW2)]),
|
||||
|
||||
|
||||
lists:foreach(
|
||||
fun({Bucket, Key}) ->
|
||||
testutil:book_riakdelete(Book1, Bucket, Key, [])
|
||||
end,
|
||||
KL1),
|
||||
io:format(
|
||||
"Deletion took ~w microseconds for 80K keys~n",
|
||||
[timer:now_diff(os:timestamp(), SW2)]),
|
||||
|
||||
ok = leveled_bookie:book_compactjournal(Book1, 30000),
|
||||
F = fun leveled_bookie:book_islastcompactionpending/1,
|
||||
lists:foldl(fun(X, Pending) ->
|
||||
case Pending of
|
||||
false ->
|
||||
false;
|
||||
true ->
|
||||
io:format("Loop ~w waiting for journal "
|
||||
++ "compaction to complete~n", [X]),
|
||||
timer:sleep(20000),
|
||||
F(Book1)
|
||||
end end,
|
||||
true,
|
||||
lists:seq(1, 15)),
|
||||
lists:foldl(
|
||||
fun(X, Pending) ->
|
||||
case Pending of
|
||||
false ->
|
||||
false;
|
||||
true ->
|
||||
io:format(
|
||||
"Loop ~w waiting for journal "
|
||||
"compaction to complete~n",
|
||||
[X]
|
||||
),
|
||||
timer:sleep(20000),
|
||||
F(Book1)
|
||||
end
|
||||
end,
|
||||
true,
|
||||
lists:seq(1, 15)),
|
||||
io:format("Waiting for journal deletes - blocked~n"),
|
||||
timer:sleep(20000),
|
||||
|
||||
|
@ -1113,7 +1132,7 @@ many_put_fetch_switchcompression_tester(CompressionMethod) ->
|
|||
|
||||
%% Change method back again
|
||||
{ok, Bookie3} = leveled_bookie:book_start(StartOpts1),
|
||||
testutil:check_formissingobject(Bookie3, "Bookie1", "MissingKey0123"),
|
||||
testutil:check_formissingobject(Bookie3, <<"Bookie1">>, "MissingKey0123"),
|
||||
lists:foreach(
|
||||
fun(CL) -> ok = testutil:check_forlist(Bookie3, CL) end, CL2s),
|
||||
lists:foreach(
|
||||
|
@ -1244,10 +1263,12 @@ bigpcl_bucketlist(_Config) ->
|
|||
|
||||
MapFun =
|
||||
fun(B) ->
|
||||
testutil:generate_objects(ObjectCount, 1, [],
|
||||
leveled_rand:rand_bytes(100),
|
||||
fun() -> [] end,
|
||||
B)
|
||||
testutil:generate_objects(
|
||||
ObjectCount, 1, [],
|
||||
crypto:strong_rand_bytes(100),
|
||||
fun() -> [] end,
|
||||
B
|
||||
)
|
||||
end,
|
||||
ObjLofL = lists:map(MapFun, BucketList),
|
||||
lists:foreach(fun(ObjL) -> testutil:riakload(Bookie1, ObjL) end, ObjLofL),
|
||||
|
@ -1263,11 +1284,15 @@ bigpcl_bucketlist(_Config) ->
|
|||
FBAccT = {BucketFold, sets:new()},
|
||||
|
||||
{async, BucketFolder1} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list, BucketList},
|
||||
FBAccT,
|
||||
false, false, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list, BucketList},
|
||||
FBAccT,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
),
|
||||
|
||||
{FoldTime1, BucketList1} = timer:tc(BucketFolder1, []),
|
||||
true = BucketCount == sets:size(BucketList1),
|
||||
|
@ -1276,11 +1301,15 @@ bigpcl_bucketlist(_Config) ->
|
|||
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
|
||||
|
||||
{async, BucketFolder2} =
|
||||
leveled_bookie:book_headfold(Bookie2,
|
||||
?RIAK_TAG,
|
||||
{bucket_list, BucketList},
|
||||
FBAccT,
|
||||
false, false, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie2,
|
||||
?RIAK_TAG,
|
||||
{bucket_list, BucketList},
|
||||
FBAccT,
|
||||
false,
|
||||
false,
|
||||
false
|
||||
),
|
||||
{FoldTime2, BucketList2} = timer:tc(BucketFolder2, []),
|
||||
true = BucketCount == sets:size(BucketList2),
|
||||
|
||||
|
|
|
@ -57,7 +57,7 @@ expiring_indexes(_Config) ->
|
|||
Indexes9 = testutil:get_randomindexes_generator(2),
|
||||
TempRiakObjects =
|
||||
testutil:generate_objects(
|
||||
KeyCount, binary_uuid, [], V9, Indexes9, "riakBucket"),
|
||||
KeyCount, binary_uuid, [], V9, Indexes9, <<"riakBucket">>),
|
||||
|
||||
IBKL1 = testutil:stdload_expiring(Bookie1, KeyCount, Future),
|
||||
lists:foreach(
|
||||
|
@ -147,11 +147,13 @@ expiring_indexes(_Config) ->
|
|||
Bookie1, B0, K0, 5, <<"value">>, leveled_util:integer_now() + 10),
|
||||
timer:sleep(1000),
|
||||
{async, Folder2} = IndexFold(),
|
||||
leveled_bookie:book_indexfold(Bookie1,
|
||||
B0,
|
||||
{FoldFun, InitAcc},
|
||||
{<<"temp_int">>, 5, 8},
|
||||
{true, undefined}),
|
||||
leveled_bookie:book_indexfold(
|
||||
Bookie1,
|
||||
B0,
|
||||
{FoldFun, InitAcc},
|
||||
{<<"temp_int">>, 5, 8},
|
||||
{true, undefined}
|
||||
),
|
||||
QR2 = Folder2(),
|
||||
io:format("Query with additional entry length ~w~n", [length(QR2)]),
|
||||
true = lists:sort(QR2) == lists:sort([{5, B0, K0}|LoadedEntriesInRange]),
|
||||
|
@ -208,11 +210,9 @@ breaking_folds(_Config) ->
|
|||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
ObjectGen = testutil:get_compressiblevalue_andinteger(),
|
||||
IndexGen = testutil:get_randomindexes_generator(8),
|
||||
ObjL1 = testutil:generate_objects(KeyCount,
|
||||
binary_uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen),
|
||||
ObjL1 =
|
||||
testutil:generate_objects(
|
||||
KeyCount, binary_uuid, [], ObjectGen, IndexGen),
|
||||
testutil:riakload(Bookie1, ObjL1),
|
||||
|
||||
% Find all keys index, and then same again but stop at a midpoint using a
|
||||
|
@ -261,7 +261,6 @@ breaking_folds(_Config) ->
|
|||
io:format("Index fold with result size ~w~n", [length(KeyList2)]),
|
||||
true = KeyCount div 2 == length(KeyList2),
|
||||
|
||||
|
||||
HeadFoldFun =
|
||||
fun(_B, K, PO, Acc) ->
|
||||
{proxy_object, _MDBin, Size, _FF} = binary_to_term(PO),
|
||||
|
@ -287,10 +286,14 @@ breaking_folds(_Config) ->
|
|||
end
|
||||
end,
|
||||
{async, HeadFolderToMidK} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowFun(HeadFoldFun), []},
|
||||
true, true, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowFun(HeadFoldFun), []},
|
||||
true,
|
||||
true,
|
||||
false
|
||||
),
|
||||
KeySizeList2 = lists:reverse(CatchingFold(HeadFolderToMidK)),
|
||||
io:format("Head fold with result size ~w~n", [length(KeySizeList2)]),
|
||||
true = KeyCount div 2 == length(KeySizeList2),
|
||||
|
@ -300,21 +303,25 @@ breaking_folds(_Config) ->
|
|||
[{K,byte_size(V)}|Acc]
|
||||
end,
|
||||
{async, ObjectFolderKO} =
|
||||
leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{ObjFoldFun, []},
|
||||
false,
|
||||
key_order),
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{ObjFoldFun, []},
|
||||
false,
|
||||
key_order
|
||||
),
|
||||
ObjSizeList1 = lists:reverse(ObjectFolderKO()),
|
||||
io:format("Obj fold with result size ~w~n", [length(ObjSizeList1)]),
|
||||
true = KeyCount == length(ObjSizeList1),
|
||||
|
||||
{async, ObjFolderToMidK} =
|
||||
leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowFun(ObjFoldFun), []},
|
||||
false,
|
||||
key_order),
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowFun(ObjFoldFun), []},
|
||||
false,
|
||||
key_order
|
||||
),
|
||||
ObjSizeList2 = lists:reverse(CatchingFold(ObjFolderToMidK)),
|
||||
io:format("Object fold with result size ~w~n", [length(ObjSizeList2)]),
|
||||
true = KeyCount div 2 == length(ObjSizeList2),
|
||||
|
@ -324,11 +331,13 @@ breaking_folds(_Config) ->
|
|||
% that was terminated by reaching a point in the key range .. as results
|
||||
% will not be passed to the fold function in key order
|
||||
{async, ObjectFolderSO} =
|
||||
leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{ObjFoldFun, []},
|
||||
false,
|
||||
sqn_order),
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{ObjFoldFun, []},
|
||||
false,
|
||||
sqn_order
|
||||
),
|
||||
ObjSizeList1_SO = lists:reverse(ObjectFolderSO()),
|
||||
io:format("Obj fold with result size ~w~n", [length(ObjSizeList1_SO)]),
|
||||
true = KeyCount == length(ObjSizeList1_SO),
|
||||
|
@ -346,33 +355,26 @@ breaking_folds(_Config) ->
|
|||
end
|
||||
end,
|
||||
{async, ObjFolderTo1K} =
|
||||
leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowThousandFun(ObjFoldFun), []},
|
||||
false,
|
||||
sqn_order),
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldThrowThousandFun(ObjFoldFun), []},
|
||||
false,
|
||||
sqn_order
|
||||
),
|
||||
ObjSizeList2_SO = lists:reverse(CatchingFold(ObjFolderTo1K)),
|
||||
io:format("Object fold with result size ~w~n", [length(ObjSizeList2_SO)]),
|
||||
true = 1000 == length(ObjSizeList2_SO),
|
||||
|
||||
ObjL2 = testutil:generate_objects(10,
|
||||
binary_uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
"B2"),
|
||||
ObjL3 = testutil:generate_objects(10,
|
||||
binary_uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
"B3"),
|
||||
ObjL4 = testutil:generate_objects(10,
|
||||
binary_uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
"B4"),
|
||||
ObjL2 =
|
||||
testutil:generate_objects(
|
||||
10, binary_uuid, [], ObjectGen, IndexGen, <<"B2">>),
|
||||
ObjL3 =
|
||||
testutil:generate_objects(
|
||||
10, binary_uuid, [], ObjectGen, IndexGen, <<"B3">>),
|
||||
ObjL4 =
|
||||
testutil:generate_objects(
|
||||
10, binary_uuid, [], ObjectGen, IndexGen, <<"B4">>),
|
||||
testutil:riakload(Bookie1, ObjL2),
|
||||
testutil:riakload(Bookie1, ObjL3),
|
||||
testutil:riakload(Bookie1, ObjL4),
|
||||
|
@ -396,20 +398,16 @@ breaking_folds(_Config) ->
|
|||
end,
|
||||
|
||||
{async, StopAt3BucketFolder} =
|
||||
leveled_bookie:book_bucketlist(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{StopAt3Fun, []},
|
||||
all),
|
||||
leveled_bookie:book_bucketlist(
|
||||
Bookie1, ?RIAK_TAG, {StopAt3Fun, []}, all),
|
||||
BucketListSA3 = lists:reverse(CatchingFold(StopAt3BucketFolder)),
|
||||
io:format("bucket list with result ~w~n", [BucketListSA3]),
|
||||
true = [<<"B2">>, <<"B3">>] == BucketListSA3,
|
||||
|
||||
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
testutil:reset_filestructure().
|
||||
|
||||
|
||||
|
||||
single_object_with2i(_Config) ->
|
||||
% Load a single object with an integer and a binary
|
||||
% index and query for it
|
||||
|
@ -429,36 +427,40 @@ single_object_with2i(_Config) ->
|
|||
{async, IdxFolder1} =
|
||||
leveled_bookie:book_indexfold(
|
||||
Bookie1,
|
||||
"Bucket1",
|
||||
<<"Bucket1">>,
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{list_to_binary("binary_bin"),
|
||||
<<99:32/integer>>, <<101:32/integer>>},
|
||||
{true, undefined}),
|
||||
R1 = IdxFolder1(),
|
||||
io:format("R1 of ~w~n", [R1]),
|
||||
true = [{<<100:32/integer>>,"Key1"}] == R1,
|
||||
true = [{<<100:32/integer>>, <<"Key1">>}] == R1,
|
||||
|
||||
IdxQ2 = {index_query,
|
||||
"Bucket1",
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{list_to_binary("integer_int"),
|
||||
99, 101},
|
||||
{true, undefined}},
|
||||
IdxQ2 =
|
||||
{
|
||||
index_query,
|
||||
<<"Bucket1">>,
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{list_to_binary("integer_int"), 99, 101},
|
||||
{true, undefined}
|
||||
},
|
||||
{async, IdxFolder2} = leveled_bookie:book_returnfolder(Bookie1, IdxQ2),
|
||||
R2 = IdxFolder2(),
|
||||
io:format("R2 of ~w~n", [R2]),
|
||||
true = [{100,"Key1"}] == R2,
|
||||
true = [{100, <<"Key1">>}] == R2,
|
||||
|
||||
IdxQ3 = {index_query,
|
||||
{"Bucket1", "Key1"},
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{list_to_binary("integer_int"),
|
||||
99, 101},
|
||||
{true, undefined}},
|
||||
IdxQ3 =
|
||||
{
|
||||
index_query,
|
||||
{<<"Bucket1">>, <<"Key1">>},
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{list_to_binary("integer_int"), 99, 101},
|
||||
{true, undefined}
|
||||
},
|
||||
{async, IdxFolder3} = leveled_bookie:book_returnfolder(Bookie1, IdxQ3),
|
||||
R3 = IdxFolder3(),
|
||||
io:format("R2 of ~w~n", [R3]),
|
||||
true = [{100,"Key1"}] == R3,
|
||||
true = [{100, <<"Key1">>}] == R3,
|
||||
|
||||
ok = leveled_bookie:book_close(Bookie1),
|
||||
testutil:reset_filestructure().
|
||||
|
@ -473,7 +475,7 @@ small_load_with2i(_Config) ->
|
|||
{TestObject, TestSpec} = testutil:generate_testobject(),
|
||||
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
testutil:check_formissingobject(Bookie1, "Bucket1", "Key2"),
|
||||
testutil:check_formissingobject(Bookie1, <<"Bucket1">>, <<"Key2">>),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
ObjectGen = testutil:get_compressiblevalue_andinteger(),
|
||||
IndexGen = testutil:get_randomindexes_generator(8),
|
||||
|
@ -486,58 +488,60 @@ small_load_with2i(_Config) ->
|
|||
testutil:check_forobject(Bookie1, TestObject),
|
||||
|
||||
% Find all keys index, and then just the last key
|
||||
IdxQ1 = {index_query,
|
||||
"Bucket",
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{<<"idx1_bin">>, <<"#">>, <<"|">>},
|
||||
{true, undefined}},
|
||||
IdxQ1 =
|
||||
{
|
||||
index_query,
|
||||
<<"Bucket">>,
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{<<"idx1_bin">>, <<"#">>, <<"|">>},
|
||||
{true, undefined}
|
||||
},
|
||||
{async, IdxFolder} = leveled_bookie:book_returnfolder(Bookie1, IdxQ1),
|
||||
KeyList1 = lists:usort(IdxFolder()),
|
||||
true = 10000 == length(KeyList1),
|
||||
{LastTerm, LastKey} = lists:last(KeyList1),
|
||||
IdxQ2 = {index_query,
|
||||
{"Bucket", LastKey},
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{<<"idx1_bin">>, LastTerm, <<"|">>},
|
||||
{false, undefined}},
|
||||
IdxQ2 =
|
||||
{
|
||||
index_query,
|
||||
{<<"Bucket">>, LastKey},
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{<<"idx1_bin">>, LastTerm, <<"|">>},
|
||||
{false, undefined}
|
||||
},
|
||||
{async, IdxFolderLK} = leveled_bookie:book_returnfolder(Bookie1, IdxQ2),
|
||||
KeyList2 = lists:usort(IdxFolderLK()),
|
||||
io:format("List should be last key ~w ~w~n", [LastKey, KeyList2]),
|
||||
true = 1 == length(KeyList2),
|
||||
|
||||
%% Delete the objects from the ChkList removing the indexes
|
||||
lists:foreach(fun({_RN, Obj, Spc}) ->
|
||||
DSpc = lists:map(fun({add, F, T}) ->
|
||||
{remove, F, T}
|
||||
end,
|
||||
Spc),
|
||||
{B, K} =
|
||||
{testutil:get_bucket(Obj), testutil:get_key(Obj)},
|
||||
testutil:book_riakdelete(Bookie1, B, K, DSpc)
|
||||
end,
|
||||
ChkList1),
|
||||
lists:foreach(
|
||||
fun({_RN, Obj, Spc}) ->
|
||||
DSpc =
|
||||
lists:map(fun({add, F, T}) -> {remove, F, T} end, Spc),
|
||||
{B, K} = {testutil:get_bucket(Obj), testutil:get_key(Obj)},
|
||||
testutil:book_riakdelete(Bookie1, B, K, DSpc)
|
||||
end,
|
||||
ChkList1
|
||||
),
|
||||
%% Get the Buckets Keys and Hashes for the whole bucket
|
||||
FoldObjectsFun = fun(B, K, V, Acc) -> [{B, K, erlang:phash2(V)}|Acc]
|
||||
end,
|
||||
FoldObjectsFun =
|
||||
fun(B, K, V, Acc) -> [{B, K, erlang:phash2(V)}|Acc] end,
|
||||
|
||||
{async, HTreeF1} = leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldObjectsFun, []},
|
||||
false),
|
||||
{async, HTreeF1} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1, ?RIAK_TAG, {FoldObjectsFun, []}, false),
|
||||
|
||||
KeyHashList1 = HTreeF1(),
|
||||
{async, HTreeF2} = leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
"Bucket",
|
||||
all,
|
||||
{FoldObjectsFun, []},
|
||||
false),
|
||||
{async, HTreeF2} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1, ?RIAK_TAG, <<"Bucket">>, all, {FoldObjectsFun, []}, false
|
||||
),
|
||||
KeyHashList2 = HTreeF2(),
|
||||
{async, HTreeF3} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
"Bucket",
|
||||
<<"Bucket">>,
|
||||
{<<"idx1_bin">>, <<"#">>, <<"|">>},
|
||||
{FoldObjectsFun, []},
|
||||
false),
|
||||
|
@ -546,12 +550,13 @@ small_load_with2i(_Config) ->
|
|||
true = 9900 == length(KeyHashList2),
|
||||
true = 9900 == length(KeyHashList3),
|
||||
|
||||
SumIntFun = fun(_B, _K, Obj, Acc) ->
|
||||
{I, _Bin} = testutil:get_value(Obj),
|
||||
Acc + I
|
||||
end,
|
||||
SumIntFun =
|
||||
fun(_B, _K, Obj, Acc) ->
|
||||
{I, _Bin} = testutil:get_value(Obj),
|
||||
Acc + I
|
||||
end,
|
||||
BucketObjQ =
|
||||
{foldobjects_bybucket, ?RIAK_TAG, "Bucket", all, {SumIntFun, 0}, true},
|
||||
{foldobjects_bybucket, ?RIAK_TAG, <<"Bucket">>, all, {SumIntFun, 0}, true},
|
||||
{async, Sum1} = leveled_bookie:book_returnfolder(Bookie1, BucketObjQ),
|
||||
Total1 = Sum1(),
|
||||
io:format("Total from summing all I is ~w~n", [Total1]),
|
||||
|
@ -596,21 +601,18 @@ query_count(_Config) ->
|
|||
BucketBin = list_to_binary("Bucket"),
|
||||
{TestObject, TestSpec} =
|
||||
testutil:generate_testobject(
|
||||
BucketBin, term_to_binary("Key1"), "Value1", [], [{"MDK1", "MDV1"}]),
|
||||
BucketBin, term_to_binary("Key1"), <<"Value1">>, [], [{<<"MDK1">>, <<"MDV1">>}]),
|
||||
ok = testutil:book_riakput(Book1, TestObject, TestSpec),
|
||||
testutil:check_forobject(Book1, TestObject),
|
||||
testutil:check_formissingobject(Book1, "Bucket1", "Key2"),
|
||||
testutil:check_formissingobject(Book1, <<"Bucket1">>, <<"Key2">>),
|
||||
testutil:check_forobject(Book1, TestObject),
|
||||
lists:foreach(
|
||||
fun(_X) ->
|
||||
V = testutil:get_compressiblevalue(),
|
||||
Indexes = testutil:get_randomindexes_generator(8),
|
||||
SW = os:timestamp(),
|
||||
ObjL1 = testutil:generate_objects(10000,
|
||||
binary_uuid,
|
||||
[],
|
||||
V,
|
||||
Indexes),
|
||||
ObjL1 =
|
||||
testutil:generate_objects(10000, binary_uuid, [], V, Indexes),
|
||||
testutil:riakload(Book1, ObjL1),
|
||||
io:format(
|
||||
"Put of 10000 objects with 8 index entries "
|
||||
|
@ -681,15 +683,17 @@ query_count(_Config) ->
|
|||
{true, undefined}},
|
||||
{async,
|
||||
Mia2KFolder2} = leveled_bookie:book_returnfolder(Book2, Query2),
|
||||
Mia2000Count2 = lists:foldl(fun({Term, _Key}, Acc) ->
|
||||
case re:run(Term, RegMia) of
|
||||
nomatch ->
|
||||
Acc;
|
||||
_ ->
|
||||
Acc + 1
|
||||
end end,
|
||||
0,
|
||||
Mia2KFolder2()),
|
||||
Mia2000Count2 =
|
||||
lists:foldl(
|
||||
fun({Term, _Key}, Acc) ->
|
||||
case re:run(Term, RegMia) of
|
||||
nomatch ->
|
||||
Acc;
|
||||
_ ->
|
||||
Acc + 1
|
||||
end end,
|
||||
0,
|
||||
Mia2KFolder2()),
|
||||
ok = case Mia2000Count2 of
|
||||
Mia2000Count1 when Mia2000Count1 > 0 ->
|
||||
io:format("Mia2000 counts match at ~w~n",
|
||||
|
@ -731,20 +735,22 @@ query_count(_Config) ->
|
|||
Spc9Del = lists:map(fun({add, IdxF, IdxT}) -> {remove, IdxF, IdxT} end,
|
||||
Spc9),
|
||||
ok = testutil:book_riakput(Book2, Obj9, Spc9Del),
|
||||
lists:foreach(fun({IdxF, IdxT, X}) ->
|
||||
Q = {index_query,
|
||||
BucketBin,
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{IdxF, IdxT, IdxT},
|
||||
?KEY_ONLY},
|
||||
R = leveled_bookie:book_returnfolder(Book2, Q),
|
||||
{async, Fldr} = R,
|
||||
case length(Fldr()) of
|
||||
Y ->
|
||||
Y = X - 1
|
||||
end
|
||||
end,
|
||||
R9),
|
||||
lists:foreach(
|
||||
fun({IdxF, IdxT, X}) ->
|
||||
Q = {index_query,
|
||||
BucketBin,
|
||||
{fun testutil:foldkeysfun/3, []},
|
||||
{IdxF, IdxT, IdxT},
|
||||
?KEY_ONLY},
|
||||
R = leveled_bookie:book_returnfolder(Book2, Q),
|
||||
{async, Fldr} = R,
|
||||
case length(Fldr()) of
|
||||
Y ->
|
||||
Y = X - 1
|
||||
end
|
||||
end,
|
||||
R9
|
||||
),
|
||||
ok = leveled_bookie:book_close(Book2),
|
||||
{ok, Book3} =
|
||||
leveled_bookie:book_start(
|
||||
|
@ -800,13 +806,13 @@ query_count(_Config) ->
|
|||
|
||||
ObjList10A =
|
||||
testutil:generate_objects(
|
||||
5000, binary_uuid, [], V9, Indexes9, "BucketA"),
|
||||
5000, binary_uuid, [], V9, Indexes9, <<"BucketA">>),
|
||||
ObjList10B =
|
||||
testutil:generate_objects(
|
||||
5000, binary_uuid, [], V9, Indexes9, "BucketB"),
|
||||
5000, binary_uuid, [], V9, Indexes9, <<"BucketB">>),
|
||||
ObjList10C =
|
||||
testutil:generate_objects(
|
||||
5000, binary_uuid, [], V9, Indexes9, "BucketC"),
|
||||
5000, binary_uuid, [], V9, Indexes9, <<"BucketC">>),
|
||||
testutil:riakload(Book4, ObjList10A),
|
||||
testutil:riakload(Book4, ObjList10B),
|
||||
testutil:riakload(Book4, ObjList10C),
|
||||
|
@ -819,10 +825,9 @@ query_count(_Config) ->
|
|||
|
||||
ok = leveled_bookie:book_close(Book4),
|
||||
|
||||
{ok, Book5} = leveled_bookie:book_start(RootPath,
|
||||
2000,
|
||||
50000000,
|
||||
testutil:sync_strategy()),
|
||||
{ok, Book5} =
|
||||
leveled_bookie:book_start(
|
||||
RootPath, 2000, 50000000, testutil:sync_strategy()),
|
||||
{async, BLF3} = leveled_bookie:book_returnfolder(Book5, BucketListQuery),
|
||||
SW_QC = os:timestamp(),
|
||||
BucketSet3 = BLF3(),
|
||||
|
@ -866,33 +871,25 @@ multibucket_fold(_Config) ->
|
|||
testutil:sync_strategy()),
|
||||
ObjectGen = testutil:get_compressiblevalue_andinteger(),
|
||||
IndexGen = fun() -> [] end,
|
||||
ObjL1 = testutil:generate_objects(13000,
|
||||
uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
{<<"Type1">>, <<"Bucket1">>}),
|
||||
ObjL1 =
|
||||
testutil:generate_objects(
|
||||
13000, uuid, [], ObjectGen, IndexGen, {<<"Type1">>, <<"Bucket1">>}
|
||||
),
|
||||
testutil:riakload(Bookie1, ObjL1),
|
||||
ObjL2 = testutil:generate_objects(17000,
|
||||
uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
<<"Bucket2">>),
|
||||
ObjL2 =
|
||||
testutil:generate_objects(
|
||||
17000, uuid, [], ObjectGen, IndexGen, <<"Bucket2">>
|
||||
),
|
||||
testutil:riakload(Bookie1, ObjL2),
|
||||
ObjL3 = testutil:generate_objects(7000,
|
||||
uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
<<"Bucket3">>),
|
||||
ObjL3 =
|
||||
testutil:generate_objects(
|
||||
7000, uuid, [], ObjectGen, IndexGen, <<"Bucket3">>
|
||||
),
|
||||
testutil:riakload(Bookie1, ObjL3),
|
||||
ObjL4 = testutil:generate_objects(23000,
|
||||
uuid,
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
{<<"Type2">>, <<"Bucket4">>}),
|
||||
ObjL4 =
|
||||
testutil:generate_objects(
|
||||
23000, uuid, [], ObjectGen, IndexGen, {<<"Type2">>, <<"Bucket4">>}
|
||||
),
|
||||
testutil:riakload(Bookie1, ObjL4),
|
||||
|
||||
FF = fun(B, K, _PO, Acc) ->
|
||||
|
@ -901,30 +898,30 @@ multibucket_fold(_Config) ->
|
|||
FoldAccT = {FF, []},
|
||||
|
||||
{async, R1} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list,
|
||||
[{<<"Type1">>, <<"Bucket1">>},
|
||||
{<<"Type2">>, <<"Bucket4">>}]},
|
||||
FoldAccT,
|
||||
false,
|
||||
true,
|
||||
false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list,
|
||||
[{<<"Type1">>, <<"Bucket1">>}, {<<"Type2">>, <<"Bucket4">>}]},
|
||||
FoldAccT,
|
||||
false,
|
||||
true,
|
||||
false
|
||||
),
|
||||
|
||||
O1 = length(R1()),
|
||||
io:format("Result R1 of length ~w~n", [O1]),
|
||||
|
||||
{async, R2} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list,
|
||||
[<<"Bucket2">>,
|
||||
<<"Bucket3">>]},
|
||||
{fun(_B, _K, _PO, Acc) ->
|
||||
Acc +1
|
||||
end,
|
||||
0},
|
||||
false, true, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
{bucket_list, [<<"Bucket2">>, <<"Bucket3">>]},
|
||||
{fun(_B, _K, _PO, Acc) -> Acc +1 end, 0},
|
||||
false,
|
||||
true,
|
||||
false
|
||||
),
|
||||
O2 = R2(),
|
||||
io:format("Result R2 of ~w~n", [O2]),
|
||||
|
||||
|
@ -933,10 +930,8 @@ multibucket_fold(_Config) ->
|
|||
|
||||
FoldBucketsFun = fun(B, Acc) -> [B|Acc] end,
|
||||
{async, Folder} =
|
||||
leveled_bookie:book_bucketlist(Bookie1,
|
||||
?RIAK_TAG,
|
||||
{FoldBucketsFun, []},
|
||||
all),
|
||||
leveled_bookie:book_bucketlist(
|
||||
Bookie1, ?RIAK_TAG, {FoldBucketsFun, []}, all),
|
||||
BucketList = lists:reverse(Folder()),
|
||||
ExpectedBucketList =
|
||||
[{<<"Type1">>, <<"Bucket1">>}, {<<"Type2">>, <<"Bucket4">>},
|
||||
|
@ -949,54 +944,53 @@ multibucket_fold(_Config) ->
|
|||
|
||||
rotating_objects(_Config) ->
|
||||
RootPath = testutil:reset_filestructure(),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket1", 10),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket2", 200),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket3", 800),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket4", 1600),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket5", 3200),
|
||||
ok = testutil:rotating_object_check(RootPath, "Bucket6", 9600),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket1">>, 10),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket2">>, 200),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket3">>, 800),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket4">>, 1600),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket5">>, 3200),
|
||||
ok = testutil:rotating_object_check(RootPath, <<"Bucket6">>, 9600),
|
||||
testutil:reset_filestructure().
|
||||
|
||||
foldobjects_bybucket_range(_Config) ->
|
||||
RootPath = testutil:reset_filestructure(),
|
||||
{ok, Bookie1} = leveled_bookie:book_start(RootPath,
|
||||
2000,
|
||||
50000000,
|
||||
testutil:sync_strategy()),
|
||||
{ok, Bookie1} =
|
||||
leveled_bookie:book_start(
|
||||
RootPath, 2000, 50000000, testutil:sync_strategy()),
|
||||
ObjectGen = testutil:get_compressiblevalue_andinteger(),
|
||||
IndexGen = fun() -> [] end,
|
||||
ObjL1 = testutil:generate_objects(1300,
|
||||
{fixed_binary, 1},
|
||||
[],
|
||||
ObjectGen,
|
||||
IndexGen,
|
||||
<<"Bucket1">>),
|
||||
ObjL1 =
|
||||
testutil:generate_objects(
|
||||
1300, {fixed_binary, 1}, [], ObjectGen, IndexGen, <<"Bucket1">>),
|
||||
testutil:riakload(Bookie1, ObjL1),
|
||||
|
||||
FoldKeysFun = fun(_B, K,_V, Acc) ->
|
||||
[ K |Acc]
|
||||
end,
|
||||
FoldKeysFun = fun(_B, K,_V, Acc) -> [ K |Acc] end,
|
||||
|
||||
StartKey = testutil:fixed_bin_key(123),
|
||||
EndKey = testutil:fixed_bin_key(779),
|
||||
|
||||
{async, Folder} = leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
<<"Bucket1">>,
|
||||
{StartKey, EndKey}, {FoldKeysFun, []},
|
||||
true
|
||||
),
|
||||
{async, Folder} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
<<"Bucket1">>,
|
||||
{StartKey, EndKey},
|
||||
{FoldKeysFun, []},
|
||||
true
|
||||
),
|
||||
ResLen = length(Folder()),
|
||||
io:format("Length of Result of folder ~w~n", [ResLen]),
|
||||
true = 657 == ResLen,
|
||||
|
||||
{async, AllFolder} = leveled_bookie:book_objectfold(Bookie1,
|
||||
?RIAK_TAG,
|
||||
<<"Bucket1">>,
|
||||
all,
|
||||
{FoldKeysFun, []},
|
||||
true
|
||||
),
|
||||
{async, AllFolder} =
|
||||
leveled_bookie:book_objectfold(
|
||||
Bookie1,
|
||||
?RIAK_TAG,
|
||||
<<"Bucket1">>,
|
||||
all,
|
||||
{FoldKeysFun, []},
|
||||
true
|
||||
),
|
||||
|
||||
AllResLen = length(AllFolder()),
|
||||
io:format("Length of Result of all keys folder ~w~n", [AllResLen]),
|
||||
|
|
|
@ -101,7 +101,7 @@ riak_load_tester(Bucket, KeyCount, ObjSize, ProfileList, PM, LC) ->
|
|||
IndexGenFun =
|
||||
fun(ListID) ->
|
||||
fun() ->
|
||||
RandInt = leveled_rand:uniform(IndexCount - 1),
|
||||
RandInt = rand:uniform(IndexCount - 1),
|
||||
IntIndex = ["integer", integer_to_list(ListID), "_int"],
|
||||
BinIndex = ["binary", integer_to_list(ListID), "_bin"],
|
||||
[{add, iolist_to_binary(IntIndex), RandInt},
|
||||
|
@ -434,35 +434,35 @@ rotation_withnocheck(Book, B, NumberOfObjects, ObjSize, IdxCnt) ->
|
|||
Book,
|
||||
B,
|
||||
NumberOfObjects,
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IdxCnt
|
||||
),
|
||||
rotation_with_prefetch(
|
||||
Book,
|
||||
B,
|
||||
NumberOfObjects,
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IdxCnt
|
||||
),
|
||||
rotation_with_prefetch(
|
||||
Book,
|
||||
B,
|
||||
NumberOfObjects,
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IdxCnt
|
||||
),
|
||||
rotation_with_prefetch(
|
||||
Book,
|
||||
B,
|
||||
NumberOfObjects,
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IdxCnt
|
||||
),
|
||||
rotation_with_prefetch(
|
||||
Book,
|
||||
B,
|
||||
NumberOfObjects,
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IdxCnt
|
||||
),
|
||||
ok.
|
||||
|
@ -471,7 +471,7 @@ generate_chunk(CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, (Chunk - 1) * CountPerList + 1}, [],
|
||||
base64:encode(leveled_rand:rand_bytes(ObjSize)),
|
||||
base64:encode(crypto:strong_rand_bytes(ObjSize)),
|
||||
IndexGenFun(Chunk),
|
||||
Bucket
|
||||
).
|
||||
|
@ -480,7 +480,7 @@ load_chunk(Bookie, CountPerList, ObjSize, IndexGenFun, Bucket, Chunk) ->
|
|||
ct:log(?INFO, "Generating and loading ObjList ~w", [Chunk]),
|
||||
time_load_chunk(
|
||||
Bookie,
|
||||
{Bucket, base64:encode(leveled_rand:rand_bytes(ObjSize)), IndexGenFun(Chunk)},
|
||||
{Bucket, base64:encode(crypto:strong_rand_bytes(ObjSize)), IndexGenFun(Chunk)},
|
||||
(Chunk - 1) * CountPerList + 1,
|
||||
Chunk * CountPerList,
|
||||
0,
|
||||
|
@ -577,9 +577,9 @@ random_fetches(FetchType, Bookie, Bucket, ObjCount, Fetches) ->
|
|||
case I rem 5 of
|
||||
1 ->
|
||||
testutil:fixed_bin_key(
|
||||
Twenty + leveled_rand:uniform(ObjCount - Twenty));
|
||||
Twenty + rand:uniform(ObjCount - Twenty));
|
||||
_ ->
|
||||
testutil:fixed_bin_key(leveled_rand:uniform(Twenty))
|
||||
testutil:fixed_bin_key(rand:uniform(Twenty))
|
||||
end
|
||||
end,
|
||||
{TC, ok} =
|
||||
|
@ -616,18 +616,18 @@ random_fetches(FetchType, Bookie, Bucket, ObjCount, Fetches) ->
|
|||
random_queries(Bookie, Bucket, IDs, IdxCnt, MaxRange, IndexesReturned) ->
|
||||
QueryFun =
|
||||
fun() ->
|
||||
ID = leveled_rand:uniform(IDs),
|
||||
ID = rand:uniform(IDs),
|
||||
BinIndex =
|
||||
iolist_to_binary(["binary", integer_to_list(ID), "_bin"]),
|
||||
Twenty = IdxCnt div 5,
|
||||
RI = leveled_rand:uniform(MaxRange),
|
||||
RI = rand:uniform(MaxRange),
|
||||
[Start, End] =
|
||||
case RI of
|
||||
RI when RI < (MaxRange div 5) ->
|
||||
R0 = leveled_rand:uniform(IdxCnt - (Twenty + RI)),
|
||||
R0 = rand:uniform(IdxCnt - (Twenty + RI)),
|
||||
[R0 + Twenty, R0 + Twenty + RI];
|
||||
_ ->
|
||||
R0 = leveled_rand:uniform(Twenty - RI),
|
||||
R0 = rand:uniform(Twenty - RI),
|
||||
[R0, R0 + RI]
|
||||
end,
|
||||
FoldKeysFun = fun(_B, _K, Cnt) -> Cnt + 1 end,
|
||||
|
|
|
@ -58,7 +58,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
IndexGenFun =
|
||||
fun(ListID) ->
|
||||
fun() ->
|
||||
RandInt = leveled_rand:uniform(IndexCount),
|
||||
RandInt = rand:uniform(IndexCount),
|
||||
ID = integer_to_list(ListID),
|
||||
[{add,
|
||||
list_to_binary("integer" ++ ID ++ "_int"),
|
||||
|
@ -75,7 +75,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
IndexGenFun(1),
|
||||
Bucket
|
||||
),
|
||||
|
@ -83,7 +83,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, CountPerList + 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
IndexGenFun(2),
|
||||
Bucket
|
||||
),
|
||||
|
@ -92,7 +92,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, 2 * CountPerList + 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
IndexGenFun(3),
|
||||
Bucket
|
||||
),
|
||||
|
@ -101,7 +101,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, 3 * CountPerList + 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
IndexGenFun(4),
|
||||
Bucket
|
||||
),
|
||||
|
@ -110,7 +110,7 @@ basic_riak_tester(Bucket, KeyCount) ->
|
|||
testutil:generate_objects(
|
||||
CountPerList,
|
||||
{fixed_binary, 4 * CountPerList + 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
IndexGenFun(5),
|
||||
Bucket
|
||||
),
|
||||
|
@ -276,7 +276,7 @@ summarisable_sstindex(_Config) ->
|
|||
ObjListToSort =
|
||||
lists:map(
|
||||
fun(I) ->
|
||||
{leveled_rand:uniform(KeyCount * 10),
|
||||
{rand:uniform(KeyCount * 10),
|
||||
testutil:set_object(
|
||||
Bucket, KeyGen(I), integer_to_binary(I), IndexGen, [])}
|
||||
end,
|
||||
|
@ -344,7 +344,7 @@ summarisable_sstindex(_Config) ->
|
|||
true = 200 == length(KeyRangeCheckFun(StartKey, EndKey))
|
||||
end,
|
||||
lists:map(
|
||||
fun(_I) -> leveled_rand:uniform(KeyCount - 200) end,
|
||||
fun(_I) -> rand:uniform(KeyCount - 200) end,
|
||||
lists:seq(1, 100))),
|
||||
|
||||
IdxObjKeyCount = 50000,
|
||||
|
@ -367,7 +367,7 @@ summarisable_sstindex(_Config) ->
|
|||
IdxObjListToSort =
|
||||
lists:map(
|
||||
fun(I) ->
|
||||
{leveled_rand:uniform(KeyCount * 10),
|
||||
{rand:uniform(KeyCount * 10),
|
||||
testutil:set_object(
|
||||
Bucket,
|
||||
KeyGen(I),
|
||||
|
@ -419,7 +419,7 @@ summarisable_sstindex(_Config) ->
|
|||
end,
|
||||
lists:map(
|
||||
fun(_I) ->
|
||||
leveled_rand:uniform(IdxObjKeyCount - 20)
|
||||
rand:uniform(IdxObjKeyCount - 20)
|
||||
end,
|
||||
lists:seq(1, 100))),
|
||||
lists:foreach(
|
||||
|
@ -430,7 +430,7 @@ summarisable_sstindex(_Config) ->
|
|||
end,
|
||||
lists:map(
|
||||
fun(_I) ->
|
||||
leveled_rand:uniform(IdxObjKeyCount - 10)
|
||||
rand:uniform(IdxObjKeyCount - 10)
|
||||
end,
|
||||
lists:seq(1, 100))),
|
||||
|
||||
|
@ -451,7 +451,7 @@ summarisable_sstindex(_Config) ->
|
|||
true = 200 == length(KeyRangeCheckFun(StartKey, EndKey))
|
||||
end,
|
||||
lists:map(
|
||||
fun(_I) -> leveled_rand:uniform(KeyCount - 200) end,
|
||||
fun(_I) -> rand:uniform(KeyCount - 200) end,
|
||||
lists:seq(1, 100))),
|
||||
|
||||
ok = leveled_bookie:book_destroy(Bookie1).
|
||||
|
@ -475,7 +475,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
100000,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(32),
|
||||
crypto:strong_rand_bytes(32),
|
||||
fun() -> [] end,
|
||||
<<"BaselineB">>
|
||||
),
|
||||
|
@ -485,7 +485,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
20000,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B0">>
|
||||
),
|
||||
|
@ -498,7 +498,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
15000,
|
||||
{fixed_binary, 20001}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B0">>
|
||||
),
|
||||
|
@ -511,7 +511,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
35000,
|
||||
{fixed_binary, 35001}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B0">>
|
||||
),
|
||||
|
@ -524,7 +524,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
30000,
|
||||
{fixed_binary, 70001}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B0">>
|
||||
),
|
||||
|
@ -537,7 +537,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
8000,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B1">>
|
||||
),
|
||||
|
@ -550,7 +550,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
7000,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(512),
|
||||
crypto:strong_rand_bytes(512),
|
||||
fun() -> [] end,
|
||||
<<"B2">>
|
||||
),
|
||||
|
@ -815,7 +815,7 @@ fetchclocks_modifiedbetween(_Config) ->
|
|||
testutil:generate_objects(
|
||||
200000,
|
||||
{fixed_binary, 1}, [],
|
||||
leveled_rand:rand_bytes(32),
|
||||
crypto:strong_rand_bytes(32),
|
||||
fun() -> [] end,
|
||||
<<"B1.9">>
|
||||
),
|
||||
|
@ -1637,7 +1637,7 @@ bigobject_memorycheck(_Config) ->
|
|||
ObjPutFun =
|
||||
fun(I) ->
|
||||
Key = base64:encode(<<I:32/integer>>),
|
||||
Value = leveled_rand:rand_bytes(1024 * 1024),
|
||||
Value = crypto:strong_rand_bytes(1024 * 1024),
|
||||
% a big object each time!
|
||||
{Obj, Spc} = testutil:set_object(Bucket, Key, Value, IndexGen, []),
|
||||
testutil:book_riakput(Bookie, Obj, Spc)
|
||||
|
|
|
@ -231,12 +231,14 @@ sync_strategy() ->
|
|||
none.
|
||||
|
||||
book_riakput(Pid, RiakObject, IndexSpecs) ->
|
||||
leveled_bookie:book_put(Pid,
|
||||
RiakObject#r_object.bucket,
|
||||
RiakObject#r_object.key,
|
||||
to_binary(v1, RiakObject),
|
||||
IndexSpecs,
|
||||
?RIAK_TAG).
|
||||
leveled_bookie:book_put(
|
||||
Pid,
|
||||
RiakObject#r_object.bucket,
|
||||
RiakObject#r_object.key,
|
||||
to_binary(v1, RiakObject),
|
||||
IndexSpecs,
|
||||
?RIAK_TAG
|
||||
).
|
||||
|
||||
book_tempriakput(Pid, RiakObject, IndexSpecs, TTL) ->
|
||||
leveled_bookie:book_tempput(
|
||||
|
@ -246,7 +248,8 @@ book_tempriakput(Pid, RiakObject, IndexSpecs, TTL) ->
|
|||
to_binary(v1, RiakObject),
|
||||
IndexSpecs,
|
||||
?RIAK_TAG,
|
||||
TTL).
|
||||
TTL
|
||||
).
|
||||
|
||||
book_riakdelete(Pid, Bucket, Key, IndexSpecs) ->
|
||||
leveled_bookie:book_put(Pid, Bucket, Key, delete, IndexSpecs, ?RIAK_TAG).
|
||||
|
@ -383,9 +386,8 @@ wait_for_compaction(Bookie) ->
|
|||
check_bucket_stats(Bookie, Bucket) ->
|
||||
FoldSW1 = os:timestamp(),
|
||||
io:format("Checking bucket size~n"),
|
||||
{async, Folder1} = leveled_bookie:book_returnfolder(Bookie,
|
||||
{riakbucket_stats,
|
||||
Bucket}),
|
||||
{async, Folder1} =
|
||||
leveled_bookie:book_returnfolder(Bookie, {riakbucket_stats, Bucket}),
|
||||
{B1Size, B1Count} = Folder1(),
|
||||
io:format("Bucket fold completed in ~w microseconds~n",
|
||||
[timer:now_diff(os:timestamp(), FoldSW1)]),
|
||||
|
@ -399,28 +401,32 @@ check_forlist(Bookie, ChkList) ->
|
|||
|
||||
check_forlist(Bookie, ChkList, Log) ->
|
||||
SW = os:timestamp(),
|
||||
lists:foreach(fun({_RN, Obj, _Spc}) ->
|
||||
if
|
||||
Log == true ->
|
||||
io:format("Fetching Key ~s~n", [Obj#r_object.key]);
|
||||
true ->
|
||||
ok
|
||||
end,
|
||||
R = book_riakget(Bookie,
|
||||
Obj#r_object.bucket,
|
||||
Obj#r_object.key),
|
||||
true = case R of
|
||||
{ok, Val} ->
|
||||
to_binary(v1, Obj) == Val;
|
||||
not_found ->
|
||||
io:format("Object not found for key ~s~n",
|
||||
[Obj#r_object.key]),
|
||||
error
|
||||
end
|
||||
end,
|
||||
ChkList),
|
||||
io:format("Fetch check took ~w microseconds checking list of length ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SW), length(ChkList)]).
|
||||
lists:foreach(
|
||||
fun({_RN, Obj, _Spc}) ->
|
||||
if
|
||||
Log == true ->
|
||||
io:format("Fetching Key ~s~n", [Obj#r_object.key]);
|
||||
true ->
|
||||
ok
|
||||
end,
|
||||
R = book_riakget(Bookie,
|
||||
Obj#r_object.bucket,
|
||||
Obj#r_object.key),
|
||||
true =
|
||||
case R of
|
||||
{ok, Val} ->
|
||||
to_binary(v1, Obj) == Val;
|
||||
not_found ->
|
||||
io:format("Object not found for key ~s~n",
|
||||
[Obj#r_object.key]),
|
||||
error
|
||||
end
|
||||
end,
|
||||
ChkList),
|
||||
io:format(
|
||||
"Fetch check took ~w microseconds checking list of length ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SW), length(ChkList)]
|
||||
).
|
||||
|
||||
checkhead_forlist(Bookie, ChkList) ->
|
||||
SW = os:timestamp(),
|
||||
|
@ -470,11 +476,14 @@ check_formissingobject(Bookie, Bucket, Key) ->
|
|||
|
||||
|
||||
generate_testobject() ->
|
||||
{B1, K1, V1, Spec1, MD} = {"Bucket1",
|
||||
"Key1",
|
||||
"Value1",
|
||||
[],
|
||||
[{"MDK1", "MDV1"}]},
|
||||
{B1, K1, V1, Spec1, MD} =
|
||||
{
|
||||
<<"Bucket1">>,
|
||||
<<"Key1">>,
|
||||
<<"Value1">>,
|
||||
[],
|
||||
[{<<"MDK1">>, <<"MDV1">>}]
|
||||
},
|
||||
generate_testobject(B1, K1, V1, Spec1, MD).
|
||||
|
||||
generate_testobject(B, K, V, Spec, MD) ->
|
||||
|
@ -493,7 +502,7 @@ generate_compressibleobjects(Count, KeyNumber) ->
|
|||
|
||||
|
||||
get_compressiblevalue_andinteger() ->
|
||||
{leveled_rand:uniform(1000), get_compressiblevalue()}.
|
||||
{rand:uniform(1000), get_compressiblevalue()}.
|
||||
|
||||
get_compressiblevalue() ->
|
||||
S1 = "111111111111111",
|
||||
|
@ -510,7 +519,7 @@ get_compressiblevalue() ->
|
|||
iolist_to_binary(
|
||||
lists:foldl(
|
||||
fun(_X, Acc) ->
|
||||
{_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
|
||||
{_, Str} = lists:keyfind(rand:uniform(8), 1, Selector),
|
||||
[Str|Acc] end,
|
||||
[""],
|
||||
L
|
||||
|
@ -518,28 +527,39 @@ get_compressiblevalue() ->
|
|||
).
|
||||
|
||||
generate_smallobjects(Count, KeyNumber) ->
|
||||
generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(512)).
|
||||
generate_objects(Count, KeyNumber, [], crypto:strong_rand_bytes(512)).
|
||||
|
||||
generate_objects(Count, KeyNumber) ->
|
||||
generate_objects(Count, KeyNumber, [], leveled_rand:rand_bytes(4096)).
|
||||
generate_objects(Count, KeyNumber, [], crypto:strong_rand_bytes(4096)).
|
||||
|
||||
|
||||
generate_objects(Count, KeyNumber, ObjL, Value) ->
|
||||
generate_objects(Count, KeyNumber, ObjL, Value, fun() -> [] end).
|
||||
|
||||
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen) ->
|
||||
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, "Bucket").
|
||||
generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, <<"Bucket">>).
|
||||
|
||||
generate_objects(0, _KeyNumber, ObjL, _Value, _IndexGen, _Bucket) ->
|
||||
lists:reverse(ObjL);
|
||||
generate_objects(Count, binary_uuid, ObjL, Value, IndexGen, Bucket) ->
|
||||
{Obj1, Spec1} = set_object(list_to_binary(Bucket),
|
||||
list_to_binary(leveled_util:generate_uuid()),
|
||||
Value,
|
||||
IndexGen),
|
||||
generate_objects(
|
||||
Count, binary_uuid, ObjL, Value, IndexGen, Bucket)
|
||||
when is_list(Bucket) ->
|
||||
generate_objects(
|
||||
Count, binary_uuid, ObjL, Value, IndexGen, list_to_binary(Bucket)
|
||||
);
|
||||
generate_objects(
|
||||
Count, binary_uuid, ObjL, Value, IndexGen, Bucket)
|
||||
when is_binary(Bucket) ->
|
||||
{Obj1, Spec1} =
|
||||
set_object(
|
||||
Bucket,
|
||||
list_to_binary(leveled_util:generate_uuid()),
|
||||
Value,
|
||||
IndexGen
|
||||
),
|
||||
generate_objects(Count - 1,
|
||||
binary_uuid,
|
||||
[{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
[{rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
Value,
|
||||
IndexGen,
|
||||
Bucket);
|
||||
|
@ -550,19 +570,29 @@ generate_objects(Count, uuid, ObjL, Value, IndexGen, Bucket) ->
|
|||
IndexGen),
|
||||
generate_objects(Count - 1,
|
||||
uuid,
|
||||
[{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
[{rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
Value,
|
||||
IndexGen,
|
||||
Bucket);
|
||||
generate_objects(Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket) ->
|
||||
generate_objects(
|
||||
Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket)
|
||||
when is_list(Bucket) ->
|
||||
generate_objects(
|
||||
Count, {binary, KeyNumber}, ObjL, Value, IndexGen, list_to_binary(Bucket)
|
||||
);
|
||||
generate_objects(
|
||||
Count, {binary, KeyNumber}, ObjL, Value, IndexGen, Bucket)
|
||||
when is_binary(Bucket) ->
|
||||
{Obj1, Spec1} =
|
||||
set_object(list_to_binary(Bucket),
|
||||
list_to_binary(numbered_key(KeyNumber)),
|
||||
Value,
|
||||
IndexGen),
|
||||
set_object(
|
||||
Bucket,
|
||||
list_to_binary(numbered_key(KeyNumber)),
|
||||
Value,
|
||||
IndexGen
|
||||
),
|
||||
generate_objects(Count - 1,
|
||||
{binary, KeyNumber + 1},
|
||||
[{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
[{rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
Value,
|
||||
IndexGen,
|
||||
Bucket);
|
||||
|
@ -574,7 +604,7 @@ generate_objects(Count, {fixed_binary, KeyNumber}, ObjL, Value, IndexGen, Bucket
|
|||
IndexGen),
|
||||
generate_objects(Count - 1,
|
||||
{fixed_binary, KeyNumber + 1},
|
||||
[{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
[{rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
Value,
|
||||
IndexGen,
|
||||
Bucket);
|
||||
|
@ -585,7 +615,7 @@ generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, Bucket) ->
|
|||
IndexGen),
|
||||
generate_objects(Count - 1,
|
||||
KeyNumber + 1,
|
||||
[{leveled_rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
[{rand:uniform(), Obj1, Spec1}|ObjL],
|
||||
Value,
|
||||
IndexGen,
|
||||
Bucket).
|
||||
|
@ -652,7 +682,7 @@ update_some_objects(Bookie, ObjList, SampleSize) ->
|
|||
[C] = Obj#r_object.contents,
|
||||
MD = C#r_content.metadata,
|
||||
MD0 = dict:store(?MD_LASTMOD, os:timestamp(), MD),
|
||||
C0 = C#r_content{value = leveled_rand:rand_bytes(512),
|
||||
C0 = C#r_content{value = crypto:strong_rand_bytes(512),
|
||||
metadata = MD0},
|
||||
UpdObj = Obj#r_object{vclock = VC0, contents = [C0]},
|
||||
{R, UpdObj, Spec}
|
||||
|
@ -679,11 +709,11 @@ delete_some_objects(Bookie, ObjList, SampleSize) ->
|
|||
|
||||
generate_vclock() ->
|
||||
lists:map(fun(X) ->
|
||||
{_, Actor} = lists:keyfind(leveled_rand:uniform(10),
|
||||
{_, Actor} = lists:keyfind(rand:uniform(10),
|
||||
1,
|
||||
actor_list()),
|
||||
{Actor, X} end,
|
||||
lists:seq(1, leveled_rand:uniform(8))).
|
||||
lists:seq(1, rand:uniform(8))).
|
||||
|
||||
update_vclock(VC) ->
|
||||
[{Actor, X}|Rest] = VC,
|
||||
|
@ -785,14 +815,14 @@ name_list() ->
|
|||
|
||||
get_randomname() ->
|
||||
NameList = name_list(),
|
||||
N = leveled_rand:uniform(16),
|
||||
N = rand:uniform(16),
|
||||
{N, Name} = lists:keyfind(N, 1, NameList),
|
||||
Name.
|
||||
|
||||
get_randomdate() ->
|
||||
LowTime = 60000000000,
|
||||
HighTime = 70000000000,
|
||||
RandPoint = LowTime + leveled_rand:uniform(HighTime - LowTime),
|
||||
RandPoint = LowTime + rand:uniform(HighTime - LowTime),
|
||||
Date = calendar:gregorian_seconds_to_datetime(RandPoint),
|
||||
{{Year, Month, Day}, {Hour, Minute, Second}} = Date,
|
||||
lists:flatten(io_lib:format("~4..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
|
||||
|
|
|
@ -41,11 +41,14 @@ many_put_compare(_Config) ->
|
|||
{max_pencillercachesize, 16000},
|
||||
{sync_strategy, riak_sync}],
|
||||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
{B1, K1, V1, S1, MD} = {"Bucket",
|
||||
"Key1.1.4567.4321",
|
||||
"Value1",
|
||||
[],
|
||||
[{"MDK1", "MDV1"}]},
|
||||
{B1, K1, V1, S1, MD} =
|
||||
{
|
||||
<<"Bucket">>,
|
||||
<<"Key1.1.4567.4321">>,
|
||||
<<"Value1">>,
|
||||
[],
|
||||
[{<<"MDK1">>, <<"MDV1">>}]
|
||||
},
|
||||
{TestObject, TestSpec} = testutil:generate_testobject(B1, K1, V1, S1, MD),
|
||||
ok = testutil:book_riakput(Bookie1, TestObject, TestSpec),
|
||||
testutil:check_forobject(Bookie1, TestObject),
|
||||
|
@ -63,12 +66,15 @@ many_put_compare(_Config) ->
|
|||
|
||||
GenList = [2, 20002, 40002, 60002, 80002,
|
||||
100002, 120002, 140002, 160002, 180002],
|
||||
CLs = testutil:load_objects(20000,
|
||||
GenList,
|
||||
Bookie2,
|
||||
TestObject,
|
||||
fun testutil:generate_smallobjects/2,
|
||||
20000),
|
||||
CLs =
|
||||
testutil:load_objects(
|
||||
20000,
|
||||
GenList,
|
||||
Bookie2,
|
||||
TestObject,
|
||||
fun testutil:generate_smallobjects/2,
|
||||
20000
|
||||
),
|
||||
|
||||
% Start a new store, and load the same objects (except fot the original
|
||||
% test object) into this store
|
||||
|
@ -84,7 +90,7 @@ many_put_compare(_Config) ->
|
|||
% state between stores is consistent
|
||||
|
||||
TicTacQ = {tictactree_obj,
|
||||
{o_rkv, "Bucket", null, null, true},
|
||||
{o_rkv, <<"Bucket">>, null, null, true},
|
||||
TreeSize,
|
||||
fun(_B, _K) -> accumulate end},
|
||||
{async, TreeAFolder} = leveled_bookie:book_returnfolder(Bookie2, TicTacQ),
|
||||
|
@ -113,10 +119,13 @@ many_put_compare(_Config) ->
|
|||
true = length(AltList) > 10000,
|
||||
% check there are a significant number of differences from empty
|
||||
|
||||
WrongPartitionTicTacQ = {tictactree_obj,
|
||||
{o_rkv, "Bucket", null, null, false},
|
||||
TreeSize,
|
||||
fun(_B, _K) -> pass end},
|
||||
WrongPartitionTicTacQ =
|
||||
{
|
||||
tictactree_obj,
|
||||
{o_rkv, <<"Bucket">>, null, null, false},
|
||||
TreeSize,
|
||||
fun(_B, _K) -> pass end
|
||||
},
|
||||
{async, TreeAFolder_WP} =
|
||||
leveled_bookie:book_returnfolder(Bookie2, WrongPartitionTicTacQ),
|
||||
TreeAWP = TreeAFolder_WP(),
|
||||
|
@ -151,7 +160,7 @@ many_put_compare(_Config) ->
|
|||
{async, TreeAObjFolder0} =
|
||||
leveled_bookie:book_headfold(Bookie2,
|
||||
o_rkv,
|
||||
{range, "Bucket", all},
|
||||
{range, <<"Bucket">>, all},
|
||||
FoldAccT,
|
||||
false,
|
||||
true,
|
||||
|
@ -170,7 +179,7 @@ many_put_compare(_Config) ->
|
|||
leveled_bookie:book_headfold(
|
||||
Bookie2,
|
||||
?RIAK_TAG,
|
||||
{range, "Bucket", all},
|
||||
{range, <<"Bucket">>, all},
|
||||
{FoldObjectsFun, InitAccTree},
|
||||
true,
|
||||
true,
|
||||
|
@ -188,7 +197,7 @@ many_put_compare(_Config) ->
|
|||
leveled_bookie:book_headfold(
|
||||
Bookie2,
|
||||
?RIAK_TAG,
|
||||
{range, "Bucket", all},
|
||||
{range, <<"Bucket">>, all},
|
||||
{FoldObjectsFun, leveled_tictac:new_tree(0, TreeSize, false)},
|
||||
true,
|
||||
true,
|
||||
|
@ -218,29 +227,38 @@ many_put_compare(_Config) ->
|
|||
end,
|
||||
|
||||
{async, TreeAAltObjFolder0} =
|
||||
leveled_bookie:book_headfold(Bookie2,
|
||||
?RIAK_TAG,
|
||||
{range, "Bucket", all},
|
||||
{AltFoldObjectsFun,
|
||||
InitAccTree},
|
||||
false, true, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie2,
|
||||
?RIAK_TAG,
|
||||
{range, <<"Bucket">>, all},
|
||||
{AltFoldObjectsFun, InitAccTree},
|
||||
false,
|
||||
true,
|
||||
false
|
||||
),
|
||||
SWB2Obj = os:timestamp(),
|
||||
TreeAAltObj = TreeAAltObjFolder0(),
|
||||
io:format("Build tictac tree via object fold with no "++
|
||||
"presence check and 200K objects and alt hash in ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SWB2Obj)]),
|
||||
io:format(
|
||||
"Build tictac tree via object fold with no "
|
||||
"presence check and 200K objects and alt hash in ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SWB2Obj)]
|
||||
),
|
||||
{async, TreeBAltObjFolder0} =
|
||||
leveled_bookie:book_headfold(Bookie3,
|
||||
?RIAK_TAG,
|
||||
{range, "Bucket", all},
|
||||
{AltFoldObjectsFun,
|
||||
InitAccTree},
|
||||
false, true, false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie3,
|
||||
?RIAK_TAG,
|
||||
{range, <<"Bucket">>, all},
|
||||
{AltFoldObjectsFun, InitAccTree},
|
||||
false,
|
||||
true,
|
||||
false
|
||||
),
|
||||
SWB3Obj = os:timestamp(),
|
||||
TreeBAltObj = TreeBAltObjFolder0(),
|
||||
io:format("Build tictac tree via object fold with no "++
|
||||
"presence check and 200K objects and alt hash in ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SWB3Obj)]),
|
||||
io:format(
|
||||
"Build tictac tree via object fold with no "
|
||||
"presence check and 200K objects and alt hash in ~w~n",
|
||||
[timer:now_diff(os:timestamp(), SWB3Obj)]),
|
||||
DL_ExportFold =
|
||||
length(leveled_tictac:find_dirtyleaves(TreeBAltObj, TreeAAltObj)),
|
||||
io:format("Found dirty leaves with exportable comparison of ~w~n",
|
||||
|
@ -261,7 +279,7 @@ many_put_compare(_Config) ->
|
|||
end
|
||||
end
|
||||
end,
|
||||
SegQuery = {keylist, o_rkv, "Bucket", {FoldKeysFun(SegList0), []}},
|
||||
SegQuery = {keylist, o_rkv, <<"Bucket">>, {FoldKeysFun(SegList0), []}},
|
||||
{async, SegKeyFinder} =
|
||||
leveled_bookie:book_returnfolder(Bookie2, SegQuery),
|
||||
SWSKL0 = os:timestamp(),
|
||||
|
@ -273,7 +291,7 @@ many_put_compare(_Config) ->
|
|||
|
||||
true = length(SegKeyList) >= 1,
|
||||
true = length(SegKeyList) < 10,
|
||||
true = lists:member("Key1.1.4567.4321", SegKeyList),
|
||||
true = lists:member(<<"Key1.1.4567.4321">>, SegKeyList),
|
||||
|
||||
% Now remove the object which represents the difference between these
|
||||
% stores and confirm that the tictac trees will now match
|
||||
|
@ -630,20 +648,23 @@ tuplebuckets_headonly(_Config) ->
|
|||
SW1 = os:timestamp(),
|
||||
|
||||
{async, HeadRunner1} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?HEAD_TAG,
|
||||
{bucket_list, BucketList},
|
||||
{FoldHeadFun, []},
|
||||
false, false,
|
||||
false),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?HEAD_TAG,
|
||||
{bucket_list, BucketList},
|
||||
{FoldHeadFun, []},
|
||||
false, false,
|
||||
false
|
||||
),
|
||||
ReturnedObjSpecL1 = lists:reverse(HeadRunner1()),
|
||||
[FirstItem|_Rest] = ReturnedObjSpecL1,
|
||||
LastItem = lists:last(ReturnedObjSpecL1),
|
||||
|
||||
io:format("Returned ~w objects with first ~w and last ~w in ~w ms~n",
|
||||
[length(ReturnedObjSpecL1),
|
||||
FirstItem, LastItem,
|
||||
timer:now_diff(os:timestamp(), SW1)/1000]),
|
||||
io:format(
|
||||
"Returned ~w objects with first ~w and last ~w in ~w ms~n",
|
||||
[length(ReturnedObjSpecL1),
|
||||
FirstItem, LastItem,
|
||||
timer:now_diff(os:timestamp(), SW1)/1000]),
|
||||
|
||||
true = ReturnedObjSpecL1 == lists:sort(ObjectSpecL),
|
||||
|
||||
|
@ -654,12 +675,14 @@ tuplebuckets_headonly(_Config) ->
|
|||
|
||||
SW2 = os:timestamp(),
|
||||
{async, HeadRunner2} =
|
||||
leveled_bookie:book_headfold(Bookie1,
|
||||
?HEAD_TAG,
|
||||
{bucket_list, BucketList},
|
||||
{FoldHeadFun, []},
|
||||
false, false,
|
||||
SegList),
|
||||
leveled_bookie:book_headfold(
|
||||
Bookie1,
|
||||
?HEAD_TAG,
|
||||
{bucket_list, BucketList},
|
||||
{FoldHeadFun, []},
|
||||
false, false,
|
||||
SegList
|
||||
),
|
||||
ReturnedObjSpecL2 = lists:reverse(HeadRunner2()),
|
||||
|
||||
io:format("Returned ~w objects using seglist in ~w ms~n",
|
||||
|
@ -674,7 +697,6 @@ tuplebuckets_headonly(_Config) ->
|
|||
leveled_bookie:book_destroy(Bookie1).
|
||||
|
||||
|
||||
|
||||
basic_headonly(_Config) ->
|
||||
ObjectCount = 200000,
|
||||
RemoveCount = 100,
|
||||
|
@ -694,11 +716,14 @@ basic_headonly_test(ObjectCount, RemoveCount, HeadOnly) ->
|
|||
{head_only, HeadOnly},
|
||||
{max_journalsize, 500000}],
|
||||
{ok, Bookie1} = leveled_bookie:book_start(StartOpts1),
|
||||
{B1, K1, V1, S1, MD} = {"Bucket",
|
||||
"Key1.1.4567.4321",
|
||||
"Value1",
|
||||
[],
|
||||
[{"MDK1", "MDV1"}]},
|
||||
{B1, K1, V1, S1, MD} =
|
||||
{
|
||||
<<"Bucket">>,
|
||||
<<"Key1.1.4567.4321">>,
|
||||
<<"Value1">>,
|
||||
[],
|
||||
[{<<"MDK1">>, <<"MDV1">>}]
|
||||
},
|
||||
{TestObject, TestSpec} = testutil:generate_testobject(B1, K1, V1, S1, MD),
|
||||
{unsupported_message, put} =
|
||||
testutil:book_riakput(Bookie1, TestObject, TestSpec),
|
||||
|
@ -818,23 +843,21 @@ basic_headonly_test(ObjectCount, RemoveCount, HeadOnly) ->
|
|||
false = is_process_alive(AltSnapshot);
|
||||
no_lookup ->
|
||||
{unsupported_message, head} =
|
||||
leveled_bookie:book_head(Bookie1,
|
||||
SegmentID0,
|
||||
{Bucket0, Key0},
|
||||
h),
|
||||
leveled_bookie:book_head(
|
||||
Bookie1, SegmentID0, {Bucket0, Key0}, h),
|
||||
{unsupported_message, head} =
|
||||
leveled_bookie:book_headonly(Bookie1,
|
||||
SegmentID0,
|
||||
Bucket0,
|
||||
Key0),
|
||||
leveled_bookie:book_headonly(
|
||||
Bookie1, SegmentID0, Bucket0, Key0),
|
||||
io:format("Closing actual store ~w~n", [Bookie1]),
|
||||
ok = leveled_bookie:book_close(Bookie1)
|
||||
end,
|
||||
|
||||
{ok, FinalJournals} = file:list_dir(JFP),
|
||||
io:format("Trim has reduced journal count from " ++
|
||||
"~w to ~w and ~w after restart~n",
|
||||
[length(FNs), length(FinalFNs), length(FinalJournals)]),
|
||||
io:format(
|
||||
"Trim has reduced journal count from "
|
||||
"~w to ~w and ~w after restart~n",
|
||||
[length(FNs), length(FinalFNs), length(FinalJournals)]
|
||||
),
|
||||
|
||||
{ok, Bookie2} = leveled_bookie:book_start(StartOpts1),
|
||||
|
||||
|
@ -849,16 +872,12 @@ basic_headonly_test(ObjectCount, RemoveCount, HeadOnly) ->
|
|||
% If we allow HEAD_TAG to be suubject to a lookup, then test this
|
||||
% here
|
||||
{ok, Hash0} =
|
||||
leveled_bookie:book_head(Bookie2,
|
||||
SegmentID0,
|
||||
{Bucket0, Key0},
|
||||
h);
|
||||
leveled_bookie:book_head(
|
||||
Bookie2, SegmentID0, {Bucket0, Key0}, h);
|
||||
no_lookup ->
|
||||
{unsupported_message, head} =
|
||||
leveled_bookie:book_head(Bookie2,
|
||||
SegmentID0,
|
||||
{Bucket0, Key0},
|
||||
h)
|
||||
leveled_bookie:book_head(
|
||||
Bookie2, SegmentID0, {Bucket0, Key0}, h)
|
||||
end,
|
||||
|
||||
RemoveSpecL0 = lists:sublist(ObjectSpecL, RemoveCount),
|
||||
|
@ -873,12 +892,9 @@ basic_headonly_test(ObjectCount, RemoveCount, HeadOnly) ->
|
|||
true = AccC3 == (ObjectCount - RemoveCount),
|
||||
false = AccH3 == AccH2,
|
||||
|
||||
|
||||
ok = leveled_bookie:book_close(Bookie2).
|
||||
|
||||
|
||||
|
||||
|
||||
load_objectspecs([], _SliceSize, _Bookie) ->
|
||||
ok;
|
||||
load_objectspecs(ObjectSpecL, SliceSize, Bookie)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue