Add compatibility for old and new random / rand functions
parent a49d3bbfec
commit 25389893cf

22 changed files with 125 additions and 74 deletions

.gitignore (vendored): 3 lines changed

@@ -1,3 +1,4 @@
 *.beam
 /.eunit
-/_build
\ No newline at end of file
+/_build
+*~

rebar.config (new file): 10 lines added

@@ -0,0 +1,10 @@
+{erl_opts, [warnings_as_errors,
+            {platform_define, "18", old_rand},
+            {platform_define, "17", old_rand},
+            {platform_define, "^R", old_rand}]}.
+
+{profiles,
+  [{eqc, [{deps, [meck, fqc]},
+          {erl_opts, [debug_info, {parse_transform, lager_transform}, {parse_transform, eqc_cover}]},
+          {plugins, [rebar_eqc]}]}
+  ]}.
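
For context on the build change: rebar3's {platform_define, Regex, Macro} defines Macro when Regex matches the OTP release string, so old_rand is set on OTP 17, OTP 18, and any older "R"-prefixed release such as R16B03; the new src/leveled_rand.erl below keys off this via -ifndef(old_rand). Note that old_rand is defined even on OTP 18, where the rand module already exists, keeping 18 on the legacy path. A runtime sketch of the same classification, purely illustrative (module and function names are not part of the commit):

-module(rand_detect).
-export([old_rand/0]).

%% True on releases where the legacy 'random' module is the expected
%% API: OTP 17, OTP 18, or an old "R"-prefixed release such as R16B03.
%% Mirrors the three platform_define regexes in rebar.config.
old_rand() ->
    Rel = erlang:system_info(otp_release),
    lists:prefix("17", Rel) orelse
        lists:prefix("18", Rel) orelse
        hd(Rel) =:= $R.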

@@ -379,8 +379,7 @@ book_destroy(Pid) ->
 %%%============================================================================
 
 init([Opts]) ->
-    SW = os:timestamp(),
-    random:seed(erlang:phash2(self()), element(2, SW), element(3, SW)),
+    leveled_rand:seed(),
     case get_opt(snapshot_bookie, Opts) of
         undefined ->
             % Start from file not snapshot
@@ -1248,7 +1247,7 @@ get_hashaccumulator(JournalCheck, InkerClone, AddKeyFun) ->
     case leveled_codec:is_active(LK, V, Now) of
         true ->
             {B, K, H} = leveled_codec:get_keyandobjhash(LK, V),
-            Check = random:uniform() < ?CHECKJOURNAL_PROB,
+            Check = leveled_rand:uniform() < ?CHECKJOURNAL_PROB,
             case {JournalCheck, Check} of
                 {check_presence, true} ->
                     case check_presence(LK, V, InkerClone) of
@@ -1459,7 +1458,7 @@ maybepush_ledgercache(MaxCacheSize, Cache, Penciller) ->
 maybe_withjitter(CacheSize, MaxCacheSize) ->
     if
         CacheSize > MaxCacheSize ->
-            R = random:uniform(7 * MaxCacheSize),
+            R = leveled_rand:uniform(7 * MaxCacheSize),
             if
                 (CacheSize - MaxCacheSize) > R ->
                     true;
@@ -1544,7 +1543,7 @@ generate_multiple_objects(0, _KeyNumber, ObjL) ->
     ObjL;
 generate_multiple_objects(Count, KeyNumber, ObjL) ->
     Key = "Key" ++ integer_to_list(KeyNumber),
-    Value = crypto:rand_bytes(256),
+    Value = leveled_rand:rand_bytes(256),
     IndexSpec = [{add, "idx1_bin", "f" ++ integer_to_list(KeyNumber rem 10)}],
     generate_multiple_objects(Count - 1,
                               KeyNumber + 1,
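
A note on the init/1 change: the legacy random module starts every process from the same fixed default seed, which is why each gen_server seeded itself explicitly from its pid hash and a timestamp; rand (OTP 18+) seeds each process automatically, so leveled_rand:seed() can collapse to a no-op on the new code path. A small illustrative module (not from the commit, and only meaningful on releases that still ship random):

-module(seed_demo).
-export([run/0]).

%% Two fresh, unseeded processes draw the same first number from the
%% legacy 'random' module, because both start from its fixed default
%% seed. With 'rand', each process is auto-seeded and they would differ.
run() ->
    Parent = self(),
    Draw = fun() -> Parent ! random:uniform(1000000) end,
    spawn(Draw),
    spawn(Draw),
    receive A -> A end,
    receive B -> B end,
    A =:= B.  % true for 'random' without explicit seeding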

@@ -231,7 +231,7 @@ cdb_getpositions(Pid, SampleSize) ->
                     cdb_getpositions_fromidx(Pid, S0, Index, Acc)
             end
         end,
-    RandFun = fun(X) -> {random:uniform(), X} end,
+    RandFun = fun(X) -> {leveled_rand:uniform(), X} end,
     SeededL = lists:map(RandFun, lists:seq(0, 255)),
     SortedL = lists:keysort(1, SeededL),
     lists:foldl(FoldFun, [], SortedL)
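
The RandFun/keysort pair above is the usual decorate-sort-strip shuffle: tag every element with a random float, sort on the tag, then discard it. As a standalone sketch (illustrative names, using the post-OTP-18 rand API directly):

-module(shuffle_demo).
-export([shuffle/1]).

%% Decorate each element with a random key, sort on it, strip it.
shuffle(L) ->
    Decorated = [{rand:uniform(), X} || X <- L],
    [X || {_, X} <- lists:keysort(1, Decorated)].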

@@ -117,7 +117,7 @@ to_lookup(Key) ->
 %% Credit to
 %% https://github.com/afiskon/erlang-uuid-v4/blob/master/src/uuid.erl
 generate_uuid() ->
-    <<A:32, B:16, C:16, D:16, E:48>> = crypto:rand_bytes(16),
+    <<A:32, B:16, C:16, D:16, E:48>> = leveled_rand:rand_bytes(16),
     L = io_lib:format("~8.16.0b-~4.16.0b-4~3.16.0b-~4.16.0b-~12.16.0b",
                       [A, B, C band 16#0fff, D band 16#3fff bor 16#8000, E]),
     binary_to_list(list_to_binary(L)).
@@ -764,7 +764,7 @@ corrupted_inker_tag_test() ->
 %% Maybe 5 microseconds per hash
 
 hashperf_test() ->
-    OL = lists:map(fun(_X) -> crypto:rand_bytes(8192) end, lists:seq(1, 1000)),
+    OL = lists:map(fun(_X) -> leveled_rand:rand_bytes(8192) end, lists:seq(1, 1000)),
     SW = os:timestamp(),
     _HL = lists:map(fun(Obj) -> erlang:phash2(Obj) end, OL),
     io:format(user, "1000 object hashes in ~w microseconds~n",
@@ -859,4 +859,4 @@ genaaeidx_test() ->
     AAESpecsB2 = aae_indexspecs(AAE1, <<"Bucket0">>, Key, SQN, H, LastMods1),
     ?assertMatch(0, length(AAESpecsB2)).
 
--endif.
\ No newline at end of file
+-endif.
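
One detail worth noting in generate_uuid/0: swapping in leveled_rand:rand_bytes/1 leaves the v4 UUID construction untouched. The literal 4 in the format string supplies the version nibble (C is masked to 12 bits with band 16#0fff to make room for it), and D band 16#3fff bor 16#8000 forces the top two bits of D to 2#10, the RFC 4122 variant, since band binds tighter than bor in Erlang.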

@@ -275,8 +275,8 @@ schedule_compaction(CompactionHours, RunsPerDay, CurrentTS) ->
     % today.
     RandSelect =
         fun(_X) ->
-            {lists:nth(random:uniform(TotalHours), CompactionHours),
-                random:uniform(?INTERVALS_PER_HOUR)}
+            {lists:nth(leveled_rand:uniform(TotalHours), CompactionHours),
+                leveled_rand:uniform(?INTERVALS_PER_HOUR)}
         end,
     RandIntervals = lists:sort(lists:map(RandSelect,
                                          lists:seq(1, RunsPerDay))),
@@ -300,7 +300,7 @@ schedule_compaction(CompactionHours, RunsPerDay, CurrentTS) ->
 
     % Calculate the offset in seconds to this next interval
     NextS0 = NextI * (IntervalLength * 60)
-                - random:uniform(IntervalLength * 60),
+                - leveled_rand:uniform(IntervalLength * 60),
     NextM = NextS0 div 60,
     NextS = NextS0 rem 60,
     TimeDiff = calendar:time_difference(LocalTime,
@@ -932,7 +932,7 @@ compact_singlefile_totwosmallfiles_test() ->
     {ok, CDB1} = leveled_cdb:cdb_open_writer(FN1, CDBoptsLarge),
     lists:foreach(fun(X) ->
                     LK = test_ledgerkey("Key" ++ integer_to_list(X)),
-                    Value = crypto:rand_bytes(1024),
+                    Value = leveled_rand:rand_bytes(1024),
                     {IK, IV} = leveled_codec:to_inkerkv(LK, X, Value, []),
                     ok = leveled_cdb:cdb_put(CDB1, IK, IV)
                   end,
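
The schedule_compaction hunks keep the same scheduling idea: pick RunsPerDay random (hour, interval) slots from the permitted compaction hours, sort them, then subtract a random number of seconds so runs do not land exactly on interval boundaries. A condensed sketch of the selection step (illustrative, with rand standing in for leveled_rand):

-module(slot_demo).
-export([pick_slots/3]).

%% Choose RunsPerDay random (hour, interval) slots, sorted.
%% CompactionHours is assumed non-empty, as in schedule_compaction/3.
pick_slots(CompactionHours, IntervalsPerHour, RunsPerDay) ->
    TotalHours = length(CompactionHours),
    Select =
        fun(_) ->
            {lists:nth(rand:uniform(TotalHours), CompactionHours),
             rand:uniform(IntervalsPerHour)}
        end,
    lists:sort(lists:map(Select, lists:seq(1, RunsPerDay))).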

@@ -243,7 +243,7 @@ buildrandomfashion_test() ->
     ManL0 = build_testmanifest_aslist(),
     RandMapFun =
         fun(X) ->
-            {random:uniform(), X}
+            {leveled_rand:uniform(), X}
         end,
     ManL1 = lists:map(RandMapFun, ManL0),
     ManL2 = lists:sort(ManL1),
@@ -257,7 +257,7 @@ buildrandomfashion_test() ->
     test_testmanifest(Man0),
     ?assertMatch(ManL0, to_list(Man0)),
 
-    RandomEntry = lists:nth(random:uniform(50), ManL0),
+    RandomEntry = lists:nth(leveled_rand:uniform(50), ManL0),
     Man1 = remove_entry(Man0, RandomEntry),
     Man2 = add_entry(Man1, RandomEntry, false),
 
@@ -271,4 +271,4 @@ empty_active_journal_test() ->
     ?assertMatch([], generate_entry(ActJ)),
     ?assertMatch(ok, file:delete(Path ++ "test_emptyactive_file.cdb")).
 
--endif.
\ No newline at end of file
+-endif.

@@ -344,8 +344,7 @@ ink_printmanifest(Pid) ->
 %%%============================================================================
 
 init([InkerOpts]) ->
-    SW = os:timestamp(),
-    random:seed(erlang:phash2(self()), element(2, SW), element(3, SW)),
+    leveled_rand:seed(),
     case {InkerOpts#inker_options.root_path,
             InkerOpts#inker_options.start_snapshot} of
         {undefined, true} ->
@@ -989,7 +988,7 @@ compact_journal_test() ->
                 PK = "KeyZ" ++ integer_to_list(X),
                 {ok, SQN, _} = ink_put(Ink1,
                                        test_ledgerkey(PK),
-                                       crypto:rand_bytes(10000),
+                                       leveled_rand:rand_bytes(10000),
                                        {[], infinity}),
                 {SQN, test_ledgerkey(PK)}
             end,

@@ -374,7 +374,7 @@ log_timer(LogRef, Subs, StartTime, SupportedLogLevels) ->
     end.
 
 log_randomtimer(LogReference, Subs, StartTime, RandomProb) ->
-    R = random:uniform(),
+    R = leveled_rand:uniform(),
     case R < RandomProb of
         true ->
             log_timer(LogReference, Subs, StartTime);
@@ -388,7 +388,7 @@ log_randomtimer(LogReference, Subs, StartTime, RandomProb) ->
 put_timing(_Actor, undefined, T0, T1) ->
     {1, {T0, T1}, {T0, T1}};
 put_timing(Actor, {?PUT_LOGPOINT, {Total0, Total1}, {Max0, Max1}}, T0, T1) ->
-    RN = random:uniform(?PUT_LOGPOINT),
+    RN = leveled_rand:uniform(?PUT_LOGPOINT),
     case RN > ?PUT_LOGPOINT div 2 of
         true ->
             % log at the timing point less than half the time
@@ -434,7 +434,7 @@ head_timing_int(undefined, T0, Level, R) ->
         end end,
     {1, lists:foldl(NewDFun, dict:new(), head_keylist())};
 head_timing_int({?HEAD_LOGPOINT, HeadTimingD}, T0, Level, R) ->
-    RN = random:uniform(?HEAD_LOGPOINT),
+    RN = leveled_rand:uniform(?HEAD_LOGPOINT),
     case RN > ?HEAD_LOGPOINT div 2 of
         true ->
             % log at the timing point less than half the time
@@ -533,7 +533,7 @@ gen_timing_int(undefined, T0, TimerType, KeyListFun, _LogPoint, _LogRef) ->
     {1, lists:foldl(NewDFun, dict:new(), KeyListFun())};
 gen_timing_int({LogPoint, TimerD}, T0, TimerType, KeyListFun, LogPoint,
                     LogRef) ->
-    RN = random:uniform(LogPoint),
+    RN = leveled_rand:uniform(LogPoint),
     case RN > LogPoint div 2 of
         true ->
             % log at the timing point less than half the time
@@ -597,4 +597,4 @@ log_warn_test() ->
     ok = log_timer("G0001", [], os:timestamp(), [warn, error]),
     ok = log_timer("G8888", [], os:timestamp(), [info, warn, error]).
 
--endif.
\ No newline at end of file
+-endif.
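
The three *_timing_int changes above all sample the same way: once the call counter reaches the log point, draw a random number and log only when it falls in the upper half, exactly as the inline comments say. A standalone sketch of that decision (illustrative names, not commit code):

-module(sample_log_demo).
-export([at_logpoint/1]).

%% At the log point, log roughly half the time; the caller resets its
%% counter either way.
at_logpoint(LogPoint) ->
    case rand:uniform(LogPoint) > LogPoint div 2 of
        true  -> log;
        false -> skip
    end.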

@@ -248,9 +248,9 @@ generate_randomkeys(Count, BucketRangeLow, BucketRangeHigh) ->
 generate_randomkeys(0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
 generate_randomkeys(Count, Acc, BucketLow, BRange) ->
-    BNumber = string:right(integer_to_list(BucketLow + random:uniform(BRange)),
+    BNumber = string:right(integer_to_list(BucketLow + leveled_rand:uniform(BRange)),
                             4, $0),
-    KNumber = string:right(integer_to_list(random:uniform(1000)), 4, $0),
+    KNumber = string:right(integer_to_list(leveled_rand:uniform(1000)), 4, $0),
     K = {o, "Bucket" ++ BNumber, "Key" ++ KNumber},
     RandKey = {K, {Count + 1,
                     {active, infinity},

@@ -453,8 +453,7 @@ pcl_doom(Pid) ->
 %%%============================================================================
 
 init([PCLopts]) ->
-    SW = os:timestamp(),
-    random:seed(erlang:phash2(self()), element(2, SW), element(3, SW)),
+    leveled_rand:seed(),
     case {PCLopts#penciller_options.root_path,
             PCLopts#penciller_options.start_snapshot,
             PCLopts#penciller_options.snapshot_query,
@@ -921,7 +920,7 @@ update_levelzero(L0Size, {PushedTree, PushedIdx, MinSQN, MaxSQN},
     RandomFactor =
         case State#state.levelzero_cointoss of
             true ->
                case random:uniform(?COIN_SIDECOUNT) of
-                case random:uniform(?COIN_SIDECOUNT) of
+                case leveled_rand:uniform(?COIN_SIDECOUNT) of
                     1 ->
                         true;
                     _ ->
@@ -1275,8 +1274,8 @@ generate_randomkeys(0, _SQN, Acc) ->
     lists:reverse(Acc);
 generate_randomkeys(Count, SQN, Acc) ->
     K = {o,
-         lists:concat(["Bucket", random:uniform(1024)]),
-         lists:concat(["Key", random:uniform(1024)]),
+         lists:concat(["Bucket", leveled_rand:uniform(1024)]),
+         lists:concat(["Key", leveled_rand:uniform(1024)]),
          null},
     RandKey = {K,
                {SQN,
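
On the levelzero_cointoss branch above: when the coin toss is enabled, a 1-in-?COIN_SIDECOUNT draw randomly forces the level-zero push decision, which jitters pushes so they do not always trigger at exactly the same cache size. Reduced to its core (illustrative, with rand in place of leveled_rand):

-module(cointoss_demo).
-export([force/1]).

%% True with probability 1/SideCount (SideCount >= 1).
force(SideCount) ->
    rand:uniform(SideCount) =:= 1.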

@@ -330,11 +330,11 @@ merge_lookup(Manifest, LevelIdx, StartKey, EndKey) ->
 %% Hence, the initial implementation is to select files to merge at random
 mergefile_selector(Manifest, LevelIdx) when LevelIdx =< 1 ->
     Level = array:get(LevelIdx, Manifest#manifest.levels),
-    lists:nth(random:uniform(length(Level)), Level);
+    lists:nth(leveled_rand:uniform(length(Level)), Level);
 mergefile_selector(Manifest, LevelIdx) ->
     Level = leveled_tree:to_list(array:get(LevelIdx,
                                             Manifest#manifest.levels)),
-    {_SK, ME} = lists:nth(random:uniform(length(Level)), Level),
+    {_SK, ME} = lists:nth(leveled_rand:uniform(length(Level)), Level),
     ME.
 
 -spec merge_snapshot(manifest(), manifest()) -> manifest().
@@ -924,7 +924,7 @@ ext_keylookup_manifest_test() ->
     {ok, BytesCopied} = file:copy(Man7FN, Man7FNAlt),
     {ok, Bin} = file:read_file(Man7FN),
     ?assertMatch(BytesCopied, byte_size(Bin)),
-    RandPos = random:uniform(bit_size(Bin) - 1),
+    RandPos = leveled_rand:uniform(bit_size(Bin) - 1),
     <<Pre:RandPos/bitstring, BitToFlip:1/integer, Rest/bitstring>> = Bin,
     Flipped = BitToFlip bxor 1,
     ok = file:write_file(Man7FN,
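
The ext_keylookup_manifest_test hunk shows the corruption trick the test relies on: pick a random bit position, split the binary around it with a bitstring match, and flip that single bit before writing the file back. As a self-contained helper (illustrative, not commit code):

-module(bitflip_demo).
-export([flip_random_bit/1]).

%% Flip one randomly chosen bit in a binary of more than one bit.
flip_random_bit(Bin) ->
    Pos = rand:uniform(bit_size(Bin) - 1),
    <<Pre:Pos/bitstring, Bit:1/integer, Rest/bitstring>> = Bin,
    <<Pre/bitstring, (Bit bxor 1):1/integer, Rest/bitstring>>.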

@@ -254,9 +254,9 @@ generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
 generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
 generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
-    BNumber = string:right(integer_to_list(BucketLow + random:uniform(BRange)),
+    BNumber = string:right(integer_to_list(BucketLow + leveled_rand:uniform(BRange)),
                             4, $0),
-    KNumber = string:right(integer_to_list(random:uniform(1000)), 4, $0),
+    KNumber = string:right(integer_to_list(leveled_rand:uniform(1000)), 4, $0),
     {K, V} = {{o, "Bucket" ++ BNumber, "Key" ++ KNumber, null},
               {Seqn, {active, infinity}, null}},
     generate_randomkeys(Seqn + 1,
@@ -392,4 +392,4 @@ with_index_test2() ->
     _R1 = lists:foldl(CheckFun, {L0Index, TreeList}, SrcKVL).
 
 
--endif.
\ No newline at end of file
+-endif.

src/leveled_rand.erl (new file): 43 lines added

@@ -0,0 +1,43 @@
+%% Generalized random module that offers a backwards compatible API
+%% around some of the changes in rand, crypto and for time units.
+
+-module(leveled_rand).
+
+%% API
+-export([
+         uniform/1,
+         seed/0,
+         rand_bytes/1
+        ]).
+
+%%%===================================================================
+%%% New (r19+) rand style functions
+%%%===================================================================
+-ifndef(old_rand).
+uniform(N) ->
+    rand:uniform(N).
+
+seed() ->
+    ok.
+
+rand_bytes(Size) ->
+    crypto:strong_rand_bytes(Size).
+
+-else.
+%%%===================================================================
+%%% Old (r18) random style functions
+%%%===================================================================
+uniform(N) ->
+    random:uniform(N).
+
+uniform_s(N, State) ->
+    random:uniform_s(N, State).
+
+seed() ->
+    SW = os:timestamp(),
+    random:seed(erlang:phash2(self()), element(2, SW), element(3, SW)).
+
+rand_bytes(Size) ->
+    crypto:strong_rand(Size).
+
+-endif.
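
Two details of this module, as shown, do not line up with its new call sites. Several hunks above call leveled_rand:uniform/0 (the zero-arity form), which is neither exported nor defined here, and crypto:strong_rand/1 in the old_rand branch is not a function the crypto module exports; the legacy byte generator was crypto:rand_bytes/1 and the modern one is crypto:strong_rand_bytes/1. A sketch of the shims that would close both gaps, written as hypothetical additions to this module rather than as the commit's actual content:

%% Hypothetical additions to src/leveled_rand.erl (not in this commit);
%% uniform/0 would also need adding to the -export list.
-ifndef(old_rand).
uniform() ->
    rand:uniform().
-else.
uniform() ->
    random:uniform().

%% crypto:rand_bytes/1 existed on the OTP 17/18/R-series releases that
%% the old_rand platform_define targets, unlike crypto:strong_rand/1.
rand_bytes(Size) ->
    crypto:rand_bytes(Size).
-endif.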

@@ -497,7 +497,7 @@ delete_pending(timeout, State) ->
                             self()),
     % If the next thing is another timeout - may be long-running snapshot, so
     % back-off
-    {next_state, delete_pending, State, random:uniform(10) * ?DELETE_TIMEOUT};
+    {next_state, delete_pending, State, leveled_rand:uniform(10) * ?DELETE_TIMEOUT};
 delete_pending(close, State) ->
     leveled_log:log("SST07", [State#state.filename]),
     ok = file:close(State#state.handle),
@@ -1478,11 +1478,11 @@ generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
 generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
 generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
-    BRand = random:uniform(BRange),
+    BRand = leveled_rand:uniform(BRange),
     BNumber = string:right(integer_to_list(BucketLow + BRand), 4, $0),
-    KNumber = string:right(integer_to_list(random:uniform(1000)), 6, $0),
+    KNumber = string:right(integer_to_list(leveled_rand:uniform(1000)), 6, $0),
     LK = leveled_codec:to_ledgerkey("Bucket" ++ BNumber, "Key" ++ KNumber, o),
-    Chunk = crypto:rand_bytes(64),
+    Chunk = leveled_rand:rand_bytes(64),
     {_B, _K, MV, _H, _LMs} =
         leveled_codec:generate_ledgerkv(LK, Seqn, Chunk, 64, infinity),
     generate_randomkeys(Seqn + 1,
@@ -1498,7 +1498,7 @@ generate_indexkeys(Count) ->
 generate_indexkeys(0, IndexList) ->
     IndexList;
 generate_indexkeys(Count, IndexList) ->
-    Changes = generate_indexkey(random:uniform(8000), Count),
+    Changes = generate_indexkey(leveled_rand:uniform(8000), Count),
     generate_indexkeys(Count - 1, IndexList ++ Changes).
 
 generate_indexkey(Term, Count) ->
@@ -1671,7 +1671,7 @@ indexed_list_mixedkeys_bitflip_test() ->
     {_PosBinIndex1, FullBin, _HL, LK} = generate_binary_slot(lookup, Keys),
     ?assertMatch(LK, element(1, lists:last(Keys))),
     L = byte_size(FullBin),
-    Byte1 = random:uniform(L),
+    Byte1 = leveled_rand:uniform(L),
     <<PreB1:Byte1/binary, A:8/integer, PostByte1/binary>> = FullBin,
     FullBin0 =
         case A of
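
The delete_pending change keeps the randomised back-off described in its comment: the next timeout fires after between one and ten multiples of ?DELETE_TIMEOUT, spreading retries while a long-running snapshot still holds the file. The core of the pattern (illustrative):

-module(backoff_demo).
-export([next_timeout/1]).

%% Randomised back-off: wait 1x..10x the base timeout before retrying.
next_timeout(BaseTimeoutMs) ->
    rand:uniform(10) * BaseTimeoutMs.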

@@ -235,11 +235,11 @@ generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
 generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
 generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
-    BRand = random:uniform(BRange),
+    BRand = leveled_rand:uniform(BRange),
     BNumber = string:right(integer_to_list(BucketLow + BRand), 4, $0),
-    KNumber = string:right(integer_to_list(random:uniform(10000)), 6, $0),
+    KNumber = string:right(integer_to_list(leveled_rand:uniform(10000)), 6, $0),
     LK = leveled_codec:to_ledgerkey("Bucket" ++ BNumber, "Key" ++ KNumber, o),
-    Chunk = crypto:rand_bytes(64),
+    Chunk = leveled_rand:rand_bytes(64),
     {_B, _K, MV, _H, _LMs} =
         leveled_codec:generate_ledgerkv(LK, Seqn, Chunk, 64, infinity),
     generate_randomkeys(Seqn + 1,
@@ -333,4 +333,4 @@ test_bloom(N) ->
 
 
 
--endif.
\ No newline at end of file
+-endif.

@@ -570,9 +570,9 @@ generate_randomkeys(Seqn, Count, BucketRangeLow, BucketRangeHigh) ->
 generate_randomkeys(_Seqn, 0, Acc, _BucketLow, _BucketHigh) ->
     Acc;
 generate_randomkeys(Seqn, Count, Acc, BucketLow, BRange) ->
-    BRand = random:uniform(BRange),
+    BRand = leveled_rand:uniform(BRange),
     BNumber = string:right(integer_to_list(BucketLow + BRand), 4, $0),
-    KNumber = string:right(integer_to_list(random:uniform(1000)), 4, $0),
+    KNumber = string:right(integer_to_list(leveled_rand:uniform(1000)), 4, $0),
     {K, V} = {{o, "Bucket" ++ BNumber, "Key" ++ KNumber, null},
               {Seqn, {active, infinity}, null}},
     generate_randomkeys(Seqn + 1,
@@ -804,4 +804,4 @@ empty_test() ->
     T2 = empty(idxt),
     ?assertMatch(0, tsize(T2)).
 
--endif.
\ No newline at end of file
+-endif.

@@ -424,4 +424,4 @@ restart_from_blankledger(BookOpts, B_SpcL) ->
                   end,
                   B_SpcL),
     ok = leveled_bookie:book_close(Book1),
-    ok.
\ No newline at end of file
+    ok.

@@ -298,7 +298,7 @@ generate_compressibleobjects(Count, KeyNumber) ->
 
 
 get_compressiblevalue_andinteger() ->
-    {random:uniform(1000), get_compressiblevalue()}.
+    {leveled_rand:uniform(1000), get_compressiblevalue()}.
 
 get_compressiblevalue() ->
     S1 = "111111111111111",
@@ -313,7 +313,7 @@ get_compressiblevalue() ->
                 {5, S5}, {6, S6}, {7, S7}, {8, S8}],
     L = lists:seq(1, 1024),
     lists:foldl(fun(_X, Acc) ->
-                    {_, Str} = lists:keyfind(random:uniform(8), 1, Selector),
+                    {_, Str} = lists:keyfind(leveled_rand:uniform(8), 1, Selector),
                     Acc ++ Str end,
                 "",
                 L).
@@ -340,7 +340,7 @@ generate_objects(Count, binary_uuid, ObjL, Value, IndexGen, Bucket) ->
                                     IndexGen),
     generate_objects(Count - 1,
                      binary_uuid,
-                     ObjL ++ [{random:uniform(), Obj1, Spec1}],
+                     ObjL ++ [{leveled_rand:uniform(), Obj1, Spec1}],
                      Value,
                      IndexGen,
                      Bucket);
@@ -351,7 +351,7 @@ generate_objects(Count, uuid, ObjL, Value, IndexGen, Bucket) ->
                                     IndexGen),
     generate_objects(Count - 1,
                      uuid,
-                     ObjL ++ [{random:uniform(), Obj1, Spec1}],
+                     ObjL ++ [{leveled_rand:uniform(), Obj1, Spec1}],
                      Value,
                      IndexGen,
                      Bucket);
@@ -362,7 +362,7 @@ generate_objects(Count, KeyNumber, ObjL, Value, IndexGen, Bucket) ->
                                     IndexGen),
     generate_objects(Count - 1,
                      KeyNumber + 1,
-                     ObjL ++ [{random:uniform(), Obj1, Spec1}],
+                     ObjL ++ [{leveled_rand:uniform(), Obj1, Spec1}],
                      Value,
                      IndexGen,
                      Bucket).
@@ -392,11 +392,11 @@ set_object(Bucket, Key, Value, IndexGen, Indexes2Remove) ->
 
 generate_vclock() ->
     lists:map(fun(X) ->
-                    {_, Actor} = lists:keyfind(random:uniform(10),
+                    {_, Actor} = lists:keyfind(leveled_rand:uniform(10),
                                                 1,
                                                 actor_list()),
                     {Actor, X} end,
-                lists:seq(1, random:uniform(8))).
+                lists:seq(1, leveled_rand:uniform(8))).
 
 
 actor_list() ->
@@ -469,14 +469,14 @@ name_list() ->
 
 get_randomname() ->
     NameList = name_list(),
-    N = random:uniform(16),
+    N = leveled_rand:uniform(16),
     {N, Name} = lists:keyfind(N, 1, NameList),
     Name.
 
 get_randomdate() ->
     LowTime = 60000000000,
     HighTime = 70000000000,
-    RandPoint = LowTime + random:uniform(HighTime - LowTime),
+    RandPoint = LowTime + leveled_rand:uniform(HighTime - LowTime),
     Date = calendar:gregorian_seconds_to_datetime(RandPoint),
     {{Year, Month, Day}, {Hour, Minute, Second}} = Date,
     lists:flatten(io_lib:format("~4..0w~2..0w~2..0w~2..0w~2..0w~2..0w",
@@ -1039,4 +1039,4 @@ get_tictactree_fun(Bookie, Bucket, TreeSize) ->
                               "~w microseconds~n",
                           [LMD, timer:now_diff(os:timestamp(), SW)]),
             leveled_tictac:merge_trees(R, Acc)
-    end.
\ No newline at end of file
+    end.
|
@ -41,7 +41,7 @@ go_dict(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_dict(D, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
dict:find(LookupHash, D),
|
||||
|
||||
|
@ -71,7 +71,7 @@ go_ets(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_ets(Ets, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
ets:lookup(Ets, LookupHash),
|
||||
|
||||
|
@ -95,7 +95,7 @@ go_gbtree(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_gbtree(Tree, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
gb_trees:lookup(LookupHash, Tree),
|
||||
|
||||
|
@ -134,7 +134,7 @@ go_arrayofdict(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_arrayofdict(Array, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
LookupIndex = hash_to_index(LookupHash),
|
||||
dict:find(LookupHash, array:get(LookupIndex, Array)),
|
||||
|
@ -177,7 +177,7 @@ go_arrayofgbtree(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_arrayofgbtree(Array, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
LookupIndex = hash_to_index(LookupHash),
|
||||
gb_trees:lookup(LookupHash, array:get(LookupIndex, Array)),
|
||||
|
@ -212,7 +212,7 @@ go_arrayofdict_withcache(_, 0, _) ->
|
|||
{erlang:memory(), statistics(garbage_collection)};
|
||||
go_arrayofdict_withcache({MArray, CArray}, N, M) ->
|
||||
% Lookup a random key - which may not be present
|
||||
LookupKey = lists:concat(["key-", random:uniform(M)]),
|
||||
LookupKey = lists:concat(["key-", leveled_rand:uniform(M)]),
|
||||
LookupHash = hash(LookupKey),
|
||||
LookupIndex = hash_to_index(LookupHash),
|
||||
dict:find(LookupHash, array:get(LookupIndex, CArray)),
|
||||
|
@ -263,10 +263,10 @@ create_block(N, BlockType, KeyStruct) ->
|
|||
20 ->
|
||||
Key = lists:concat(["key-20-special"]);
|
||||
_ ->
|
||||
Key = lists:concat(["key-", N, "-", random:uniform(1000)])
|
||||
Key = lists:concat(["key-", N, "-", leveled_rand:uniform(1000)])
|
||||
end,
|
||||
SequenceNumber = random:uniform(1000000000),
|
||||
Indexes = [{<<"DateOfBirth_int">>, random:uniform(10000)}, {<<"index1_bin">>, lists:concat([random:uniform(1000), "SomeCommonText"])}, {<<"index2_bin">>, <<"RepetitionRepetitionRepetition">>}],
|
||||
SequenceNumber = leveled_rand:uniform(1000000000),
|
||||
Indexes = [{<<"DateOfBirth_int">>, leveled_rand:uniform(10000)}, {<<"index1_bin">>, lists:concat([leveled_rand:uniform(1000), "SomeCommonText"])}, {<<"index2_bin">>, <<"RepetitionRepetitionRepetition">>}],
|
||||
case BlockType of
|
||||
keylist ->
|
||||
Term = {o, Bucket, Key, {Indexes, SequenceNumber}},
|
||||
|
@ -320,4 +320,4 @@ findkey(Key, Tree, keygbtree) ->
|
|||
_ ->
|
||||
true
|
||||
end.
|
||||
|
||||
|
||||
|
|
|

@@ -28,7 +28,7 @@ check_positive(KeyList, Bloom, LoopCount) ->
 
 
 produce_keylist(KeyCount) ->
-    KeyPrefix = lists:concat(["PositiveKey-", random:uniform(KeyCount)]),
+    KeyPrefix = lists:concat(["PositiveKey-", leveled_rand:uniform(KeyCount)]),
     produce_keylist(KeyCount, [], KeyPrefix).
 
 produce_keylist(0, KeyList, _) ->
@@ -46,7 +46,7 @@ check_negative(KeyCount, CheckCount) ->
 check_negative(Bloom, 0, FalsePos) ->
     {byte_size(Bloom), FalsePos};
 check_negative(Bloom, CheckCount, FalsePos) ->
-    Key = lists:concat(["NegativeKey-", CheckCount, random:uniform(CheckCount)]),
+    Key = lists:concat(["NegativeKey-", CheckCount, leveled_rand:uniform(CheckCount)]),
     case leveled_rice:check_key(Key, Bloom) of
         true -> check_negative(Bloom, CheckCount - 1, FalsePos + 1);
         false -> check_negative(Bloom, CheckCount - 1, FalsePos)