From 41fb83abd1fec3a5d66b9d7daa135824a913fb61 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Fri, 31 Aug 2018 15:29:38 +0100
Subject: [PATCH 1/8] Add tests for is_empty

Where keys are strings or integers, and where subkeys are involved
---
 src/leveled_bookie.erl | 31 +++++++++++++++++++++++++++++++
 src/leveled_codec.erl  | 12 +++++++++++-
 src/leveled_runner.erl | 19 +++++++++++--------
 3 files changed, 53 insertions(+), 9 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index f5d3a6b..4c41893 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -2166,6 +2166,37 @@ is_empty_test() ->
 
     ok = leveled_bookie:book_close(Bookie1).
 
+is_empty_headonly_test() ->
+    RootPath = reset_filestructure(),
+    {ok, Bookie1} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500},
+                                    {head_only, no_lookup}]),
+    ?assertMatch(true, book_isempty(Bookie1, ?HEAD_TAG)),
+    ObjSpecs =
+        [{add, <<"B1">>, <<"K1">>, 1, 100},
+            {remove, <<"B1">>, <<"K1">>, 0, null}],
+    ok = book_mput(Bookie1, ObjSpecs),
+    ?assertMatch(false, book_isempty(Bookie1, ?HEAD_TAG)),
+    ok = book_close(Bookie1).
+
+is_empty_stringkey_test() ->
+    RootPath = reset_filestructure(),
+    {ok, Bookie1} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500}]),
+    ?assertMatch(true, book_isempty(Bookie1, ?STD_TAG)),
+    Past = leveled_util:integer_now() - 300,
+    ?assertMatch(true, leveled_bookie:book_isempty(Bookie1, ?STD_TAG)),
+    ok = book_tempput(Bookie1,
+                        "B", "K", {value, <<"V">>}, [],
+                        ?STD_TAG, Past),
+    ok = book_put(Bookie1,
+                    "B", "K0", {value, <<"V">>}, [],
+                    ?STD_TAG),
+    ?assertMatch(false, book_isempty(Bookie1, ?STD_TAG)),
+    ok = book_close(Bookie1).
+
 scan_table_test() ->
     K1 = leveled_codec:to_ledgerkey(<<"B1">>,
                                     <<"K1">>,
diff --git a/src/leveled_codec.erl b/src/leveled_codec.erl
index 2386336..e90f2c4 100644
--- a/src/leveled_codec.erl
+++ b/src/leveled_codec.erl
@@ -67,7 +67,8 @@
         riak_extract_metadata/2,
         segment_hash/1,
         to_lookup/1,
-        riak_metadata_to_binary/2]).
+        riak_metadata_to_binary/2,
+        next_key/1]).
 
 -define(V1_VERS, 1).
 -define(MAGIC, 53). % riak_kv -> riak_object
@@ -252,6 +253,8 @@ from_ledgerkey({?IDX_TAG, ?ALL_BUCKETS, {_IdxFld, IdxVal}, {Bucket, Key}}) ->
     {Bucket, Key, IdxVal};
 from_ledgerkey({?IDX_TAG, Bucket, {_IdxFld, IdxVal}, Key}) ->
     {Bucket, Key, IdxVal};
+from_ledgerkey({?HEAD_TAG, Bucket, Key, SubKey}) ->
+    {Bucket, {Key, SubKey}};
 from_ledgerkey({_Tag, Bucket, Key, _SubKey}) ->
     {Bucket, Key}.
 
@@ -835,6 +838,13 @@ get_metadata_from_siblings(<<...>>,
                             [LastMod|LastMods]).
 
+next_key(Key) when is_binary(Key) ->
+    <<Key/binary, 0>>;
+next_key(Key) when is_integer(Key) ->
+    Key + 1;
+next_key(Key) when is_list(Key) ->
+    Key ++ [0].
+
 %%%============================================================================
diff --git a/src/leveled_runner.erl b/src/leveled_runner.erl
index b8b34dc..499e3ec 100644
--- a/src/leveled_runner.erl
+++ b/src/leveled_runner.erl
@@ -437,23 +437,26 @@ get_nextbucket(NextBucket, NextKey, Tag, LedgerSnapshot, BKList, {C, L}) ->
         null ->
            leveled_log:log("B0008",[]),
            BKList;
-        {{B, K}, V} when is_binary(B), is_binary(K) ->
+        {{B, K}, V} ->
            case leveled_codec:is_active({Tag, B, K, null}, V, Now) of
                true ->
                    leveled_log:log("B0009",[B]),
-                   get_nextbucket(<<B/binary, 0>>,
+                   get_nextbucket(leveled_codec:next_key(B),
                                    null,
                                    Tag,
                                    LedgerSnapshot,
                                    [{B, K}|BKList],
                                    {C + 1, L});
                false ->
-                   get_nextbucket(B,
-                                   <<K/binary, 0>>,
-                                   Tag,
-                                   LedgerSnapshot,
-                                   BKList,
-                                   {C, L})
+                   NK =
+                       case Tag of
+                           ?HEAD_TAG ->
+                               {PK, SK} = K,
+                               {PK, leveled_codec:next_key(SK)};
+                           _ ->
+                               leveled_codec:next_key(K)
+                       end,
+                   get_nextbucket(B, NK, Tag, LedgerSnapshot, BKList, {C, L})
            end;
        {NB, _V} ->
            leveled_log:log("B0010",[NB]),

From 50967438d37f9e123bda70ce48ab722d3ee5caf5 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Sat, 1 Sep 2018 10:39:23 +0100
Subject: [PATCH 2/8] Switch from binary_bucketlist

Allow for bucket listing of non-binary buckets (integer buckets, buckets with ascii strings)
---
 src/leveled_bookie.erl             |  8 ++++----
 src/leveled_runner.erl             | 17 +++++++++--------
 test/end_to_end/basic_SUITE.erl    |  2 +-
 test/end_to_end/iterator_SUITE.erl | 10 +++++-----
 4 files changed, 19 insertions(+), 18 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index 4c41893..9da7647 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -503,7 +503,7 @@ book_head(Pid, Bucket, Key) ->
 %% {bucket_stats, Bucket} -> return a key count and total object size within
 %% a bucket
 %% {riakbucket_stats, Bucket} -> as above, but for buckets with the Riak Tag
-%% {binary_bucketlist, Tag, {FoldKeysFun, Acc}} -> if we assume buckets and
+%% {bucket_list, Tag, {FoldKeysFun, Acc}} -> if we assume buckets and
 %% keys are binaries, provides a fast bucket list function
 %% {index_query,
 %%        Constraint,
@@ -1200,14 +1200,14 @@ get_runner(State,
     leveled_runner:foldobjects_byindex(SnapFun,
                                         {Tag, Bucket, Field, FromTerm, ToTerm},
                                         FoldObjectsFun);
-get_runner(State, {binary_bucketlist, Tag, FoldAccT}) ->
+get_runner(State, {bucket_list, Tag, FoldAccT}) ->
     {FoldBucketsFun, Acc} = FoldAccT,
     SnapFun = return_snapfun(State, ledger, no_lookup, false, false),
-    leveled_runner:binary_bucketlist(SnapFun, Tag, FoldBucketsFun, Acc);
+    leveled_runner:bucket_list(SnapFun, Tag, FoldBucketsFun, Acc);
 get_runner(State, {first_bucket, Tag, FoldAccT}) ->
     {FoldBucketsFun, Acc} = FoldAccT,
     SnapFun = return_snapfun(State, ledger, no_lookup, false, false),
-    leveled_runner:binary_bucketlist(SnapFun, Tag, FoldBucketsFun, Acc, 1);
+    leveled_runner:bucket_list(SnapFun, Tag, FoldBucketsFun, Acc, 1);
 %% Set of specific runners, primarily used as exmaples for tests
 get_runner(State, DeprecatedQuery) ->
     get_deprecatedrunner(State, DeprecatedQuery).
diff --git a/src/leveled_runner.erl b/src/leveled_runner.erl
index 499e3ec..4ea08ac 100644
--- a/src/leveled_runner.erl
+++ b/src/leveled_runner.erl
@@ -23,8 +23,8 @@
 -export([
             bucket_sizestats/3,
-            binary_bucketlist/4,
-            binary_bucketlist/5,
+            bucket_list/4,
+            bucket_list/5,
             index_query/3,
             bucketkey_query/4,
             bucketkey_query/5,
@@ -73,19 +73,20 @@ bucket_sizestats(SnapFun, Bucket, Tag) ->
         end,
     {async, Runner}.
 
--spec binary_bucketlist(fun(), leveled_codec:tag(), fun(), any())
+-spec bucket_list(fun(), leveled_codec:tag(), fun(), any())
                                                         -> {async, fun()}.
 %% @doc
-%% List buckets for tag, assuming bucket names are all binary type
-binary_bucketlist(SnapFun, Tag, FoldBucketsFun, InitAcc) ->
-    binary_bucketlist(SnapFun, Tag, FoldBucketsFun, InitAcc, -1).
+%% List buckets for tag, assuming bucket names are all either binary, ascii
+%% strings or integers
+bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc) ->
+    bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc, -1).
 
--spec binary_bucketlist(fun(), leveled_codec:tag(), fun(), any(), integer())
+-spec bucket_list(fun(), leveled_codec:tag(), fun(), any(), integer())
                                                         -> {async, fun()}.
 %% @doc
 %% set Max Buckets to -1 to list all buckets, otherwise will only return
 %% MaxBuckets (use 1 to confirm that there exists any bucket for a given Tag)
-binary_bucketlist(SnapFun, Tag, FoldBucketsFun, InitAcc, MaxBuckets) ->
+bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc, MaxBuckets) ->
     Runner =
         fun() ->
             {ok, LedgerSnapshot, _JournalSnapshot} = SnapFun(),
diff --git a/test/end_to_end/basic_SUITE.erl b/test/end_to_end/basic_SUITE.erl
index 14aca22..72028be 100644
--- a/test/end_to_end/basic_SUITE.erl
+++ b/test/end_to_end/basic_SUITE.erl
@@ -778,7 +778,7 @@ is_empty_test(_Config) ->
     ok = testutil:book_riakput(Bookie1, TestObject3, TestSpec3),
 
     FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
-    BucketListQuery = {binary_bucketlist,
+    BucketListQuery = {bucket_list,
                         ?RIAK_TAG,
                         {FoldBucketsFun, sets:new()}},
     {async, BL} = leveled_bookie:book_returnfolder(Bookie1, BucketListQuery),
diff --git a/test/end_to_end/iterator_SUITE.erl b/test/end_to_end/iterator_SUITE.erl
index 3a235c7..4a78ed6 100644
--- a/test/end_to_end/iterator_SUITE.erl
+++ b/test/end_to_end/iterator_SUITE.erl
@@ -173,13 +173,13 @@ small_load_with2i(_Config) ->
     true = Total2 == Total1,
 
     FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
-    % Should not find any buckets - as there is a non-binary bucket, and no
-    % binary ones
-    BucketListQuery = {binary_bucketlist,
+    % this should find Bucket and Bucket1 - as we can now find string-based
+    % buckets using bucket_list - i.e. it isn't just binary buckets now
+    BucketListQuery = {bucket_list,
                         ?RIAK_TAG,
                         {FoldBucketsFun, sets:new()}},
     {async, BL} = leveled_bookie:book_returnfolder(Bookie2, BucketListQuery),
-    true = sets:size(BL()) == 0,
+    true = sets:size(BL()) == 2,
 
     ok = leveled_bookie:book_close(Bookie2),
     testutil:reset_filestructure().
@@ -394,7 +394,7 @@ query_count(_Config) ->
     testutil:check_forobject(Book4, TestObject),
 
     FoldBucketsFun = fun(B, Acc) -> sets:add_element(B, Acc) end,
-    BucketListQuery = {binary_bucketlist,
+    BucketListQuery = {bucket_list,
                         ?RIAK_TAG,
                         {FoldBucketsFun, sets:new()}},
     {async, BLF1} = leveled_bookie:book_returnfolder(Book4, BucketListQuery),

From bde188e691cb226b5f941a249d291d98b7918947 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Sat, 1 Sep 2018 12:10:56 +0100
Subject: [PATCH 3/8] Constrain keys

Rather than supporting any() - constrain at least to binary()/integer() or string().
---
 src/leveled_bookie.erl | 11 ++++++-----
 src/leveled_log.erl    |  2 --
 src/leveled_runner.erl |  5 +----
 3 files changed, 7 insertions(+), 11 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index 9da7647..4d439b9 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -162,6 +162,7 @@
 -type head_timings() :: no_timing|#head_timings{}.
 -type timing_types() :: head|get|put|fold.
 -type recent_aae() :: false|#recent_aae{}|undefined.
+-type key() :: binary()|integer()|string().
 -type open_options() ::
     %% For full description of options see ../docs/STARTUP_OPTIONS.md
     [{root_path, string()|undefined} |
@@ -329,7 +330,7 @@ book_start(Opts) ->
     gen_server:start_link(?MODULE, [set_defaults(Opts)], []).
 
--spec book_tempput(pid(), any(), any(), any(),
+-spec book_tempput(pid(), key(), key(), any(),
                     leveled_codec:index_specs(),
                     leveled_codec:tag(), integer()) -> ok|pause.
 
@@ -396,7 +397,7 @@ book_put(Pid, Bucket, Key, Object, IndexSpecs) ->
 book_put(Pid, Bucket, Key, Object, IndexSpecs, Tag) ->
     book_put(Pid, Bucket, Key, Object, IndexSpecs, Tag, infinity).
 
--spec book_put(pid(), any(), any(), any(),
+-spec book_put(pid(), key(), key(), any(),
                 leveled_codec:index_specs(),
                 leveled_codec:tag(), infinity|integer()) -> ok|pause.
 
@@ -432,7 +433,7 @@ book_mput(Pid, ObjectSpecs) ->
 book_mput(Pid, ObjectSpecs, TTL) ->
     gen_server:call(Pid, {mput, ObjectSpecs, TTL}, infinity).
 
--spec book_delete(pid(), any(), any(), leveled_codec:index_specs())
+-spec book_delete(pid(), key(), key(), leveled_codec:index_specs())
                                                             -> ok|pause.
 
 %% @doc
@@ -444,9 +445,9 @@ book_delete(Pid, Bucket, Key, IndexSpecs) ->
     book_put(Pid, Bucket, Key, delete, IndexSpecs, ?STD_TAG).
 
--spec book_get(pid(), any(), any(), leveled_codec:tag())
+-spec book_get(pid(), key(), key(), leveled_codec:tag())
                                                     -> {ok, any()}|not_found.
--spec book_head(pid(), any(), any(), leveled_codec:tag())
+-spec book_head(pid(), key(), key(), leveled_codec:tag())
                                                     -> {ok, any()}|not_found.
 %% @doc - GET and HEAD requests
 
diff --git a/src/leveled_log.erl b/src/leveled_log.erl
index e5295ae..b52f1cc 100644
--- a/src/leveled_log.erl
+++ b/src/leveled_log.erl
@@ -40,8 +40,6 @@
         {info, "Bucket list finds no more results"}},
     {"B0009",
         {info, "Bucket list finds Bucket ~w"}},
-    {"B0010",
-        {info, "Bucket list finds non-binary Bucket ~w"}},
     {"B0011",
         {warn, "Call to destroy the store and so all files to be removed"}},
     {"B0013",
diff --git a/src/leveled_runner.erl b/src/leveled_runner.erl
index 4ea08ac..7cf720f 100644
--- a/src/leveled_runner.erl
+++ b/src/leveled_runner.erl
@@ -458,10 +458,7 @@ get_nextbucket(NextBucket, NextKey, Tag, LedgerSnapshot, BKList, {C, L}) ->
                                leveled_codec:next_key(K)
                        end,
                    get_nextbucket(B, NK, Tag, LedgerSnapshot, BKList, {C, L})
-           end;
-       {NB, _V} ->
-           leveled_log:log("B0010",[NB]),
-           []
+           end
    end.
 

From 499a30716b9bee357407cd45f10257abdc0336f5 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Mon, 3 Sep 2018 09:54:45 +0100
Subject: [PATCH 4/8] Head only mode

---
 docs/DESIGN.md | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/docs/DESIGN.md b/docs/DESIGN.md
index 7e3b730..23142a5 100644
--- a/docs/DESIGN.md
+++ b/docs/DESIGN.md
@@ -1,4 +1,4 @@
-## Design 
+## Design
 
 The store is written in Erlang using the actor model, the primary actors being:
 
@@ -118,11 +118,17 @@ Three potential recovery strategies are supported to provide some flexibility fo
 
 - retain - on compaction KeyDeltas are retained in the Journal, only values are removed.
 
-- recalc (not yet implemented) - the compaction rules assume that on recovery the key changes will be recalculated by comparing the change with the current database state.
+- recalc (not yet implemented) - the compaction rules assume that on recovery the key changes will be recalculated by comparing the change with the current database state.  In recovery the key changes will be recalculated by comparing the change with the current database state.
+## Head only
+Leveled can be started in `head_only` mode. This is a special mode which dispenses with the long-term role of the Journal in retaining data. It is a mode to be used in *special circumstances* when values are small, and Key/Value pairs are added in batches.
 
-n recovery the key changes will be recalculated by comparing the change with the current database state.
+In `head_only` mode, batches of keys and values are stored first in the Journal; however, once the last element received by the Journal file has been persisted into the Ledger, the Journal file can be deleted. The values are never returned from the
+Journal except during startup to recover the in-memory part of the Ledger (the Bookie and Penciller's memory).
+There are two ways in which `head_only` mode can be enabled - `with_lookup` and `no_lookup`. In `with_lookup` mode an individual value can be fetched from Leveled using a HEAD request. In `no_lookup` mode, HEAD requests are not supported - values can only be returned using `fold_heads`. The `no_lookup` mode is marginally more efficient in terms of CPU usage when under write pressure (it avoids generating key hashes and hash-based lookup indexes within the Penciller).
+The `head_only` mode was created so that it could be used as an AAE store in Riak - where values may simply be a version vector or a hash, and retention of data is not critical (it is not the primary store of real users' data). Leveled is not optimised for supporting small values; the `head_only` mode improves on this when supporting small values. However, the intention is that Leveled should remain for the long term an LSM tree designed for scenarios with larger values. Features, testing and support for `head_only` modes will be limited compared to support for Leveled running in its standard mode of operation. For use cases where there is a need for `head_only` behaviour in the primary data store, an alternative store would be a safer choice.
+There is no ability to mix `head_only` behaviour with standard behaviour. There is no expected behaviour when switching databases between different `head_only` modes, and data loss is highly likely.

From ca8aa051d7571f99e35185d8219809297649ff21 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Mon, 3 Sep 2018 10:29:20 +0100
Subject: [PATCH 5/8] Add head_only fold_keys test

---
 src/leveled_bookie.erl | 51 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index 4d439b9..88da5ef 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -2181,6 +2181,57 @@ is_empty_headonly_test() ->
     ?assertMatch(false, book_isempty(Bookie1, ?HEAD_TAG)),
     ok = book_close(Bookie1).
 
+
+foldkeys_headonly_test() ->
+    foldkeys_headonly_tester(5000, 25).
+
+
+foldkeys_headonly_tester(ObjectCount, BlockSize) ->
+    RootPath = reset_filestructure(),
+    BStr = "BucketStr",
+
+    {ok, Bookie1} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500},
+                                    {head_only, no_lookup}]),
+    GenObjSpecFun =
+        fun(I) ->
+            Key = I rem 6,
+            {add, BStr, <<Key:8/integer>>, I, erlang:phash2(I)}
+        end,
+    ObjSpecs = lists:map(GenObjSpecFun, lists:seq(1, ObjectCount)),
+    ObjSpecBlocks =
+        lists:map(fun(I) ->
+                        lists:sublist(ObjSpecs, I * BlockSize + 1, BlockSize)
+                    end,
+                    lists:seq(0, ObjectCount div BlockSize - 1)),
+    lists:map(fun(Block) -> book_mput(Bookie1, Block) end, ObjSpecBlocks),
+    ?assertMatch(false, book_isempty(Bookie1, ?HEAD_TAG)),
+
+    FolderT =
+        {keylist,
+            ?HEAD_TAG, BStr,
+            {fun(_B, {K, SK}, Acc) -> [{K, SK}|Acc] end, []}
+        },
+    {async, Folder1} = book_returnfolder(Bookie1, FolderT),
+    Key_SKL1 = lists:reverse(Folder1()),
+    Key_SKL_Compare =
+        lists:usort(lists:map(fun({add, _B, K, SK, _V}) -> {K, SK} end, ObjSpecs)),
+    ?assertMatch(Key_SKL_Compare, Key_SKL1),
+
+    ok = book_close(Bookie1),
+
+    {ok, Bookie2} = book_start([{root_path, RootPath},
+                                    {max_journalsize, 1000000},
+                                    {cache_size, 500},
+                                    {head_only, no_lookup}]),
+    {async, Folder2} = book_returnfolder(Bookie2, FolderT),
+    Key_SKL2 = lists:reverse(Folder2()),
+    ?assertMatch(Key_SKL_Compare, Key_SKL2),
+
+    ok = book_close(Bookie2).
+
+
 is_empty_stringkey_test() ->
     RootPath = reset_filestructure(),
     {ok, Bookie1} = book_start([{root_path, RootPath},

From f400e8b46dd7b8763c43653f55e8232068cb8a99 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Mon, 3 Sep 2018 10:38:19 +0100
Subject: [PATCH 6/8] Add test for string and binary buckets

Tried an integer bucket - and this didn't work!  Integers are < null - and so
is_empty won't work as expected.
---
 src/leveled_bookie.erl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index 88da5ef..bcc8d95 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -2183,12 +2183,12 @@ is_empty_headonly_test() ->
 
 
 foldkeys_headonly_test() ->
-    foldkeys_headonly_tester(5000, 25).
+    foldkeys_headonly_tester(5000, 25, "BucketStr"),
+    foldkeys_headonly_tester(2000, 25, <<"B0">>).
 
 
-foldkeys_headonly_tester(ObjectCount, BlockSize) ->
+foldkeys_headonly_tester(ObjectCount, BlockSize, BStr) ->
     RootPath = reset_filestructure(),
-    BStr = "BucketStr",
 
     {ok, Bookie1} = book_start([{root_path, RootPath},
                                     {max_journalsize, 1000000},

From 402dd2a66356b6f770c5242b64db9dbfec8a1bf2 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Mon, 3 Sep 2018 10:50:28 +0100
Subject: [PATCH 7/8] Remove integer() support for keys

As number < atom() and so integer() < null.  This causes potential issues
where we assume null < any key value
---
 src/leveled_bookie.erl | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index bcc8d95..b606b2f 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -162,7 +162,7 @@
 -type head_timings() :: no_timing|#head_timings{}.
 -type timing_types() :: head|get|put|fold.
 -type recent_aae() :: false|#recent_aae{}|undefined.
--type key() :: binary()|integer()|string().
+-type key() :: binary()|string().
 -type open_options() ::
     %% For full description of options see ../docs/STARTUP_OPTIONS.md
     [{root_path, string()|undefined} |
@@ -2175,8 +2175,8 @@ is_empty_headonly_test() ->
                                     {head_only, no_lookup}]),
     ?assertMatch(true, book_isempty(Bookie1, ?HEAD_TAG)),
     ObjSpecs =
-        [{add, <<"B1">>, <<"K1">>, 1, 100},
-            {remove, <<"B1">>, <<"K1">>, 0, null}],
+        [{add, <<"B1">>, <<"K1">>, <<1:8/integer>>, 100},
+            {remove, <<"B1">>, <<"K1">>, <<0:8/integer>>, null}],
     ok = book_mput(Bookie1, ObjSpecs),
     ?assertMatch(false, book_isempty(Bookie1, ?HEAD_TAG)),
     ok = book_close(Bookie1).
@@ -2197,7 +2197,7 @@ foldkeys_headonly_tester(ObjectCount, BlockSize, BStr) ->
     GenObjSpecFun =
         fun(I) ->
             Key = I rem 6,
-            {add, BStr, <<Key:8/integer>>, I, erlang:phash2(I)}
+            {add, BStr, <<Key:8/integer>>, integer_to_list(I), I}
         end,
     ObjSpecs = lists:map(GenObjSpecFun, lists:seq(1, ObjectCount)),
     ObjSpecBlocks =
         lists:map(fun(I) ->

From c64dc1df0dad43357351dedc9dec18de520ed733 Mon Sep 17 00:00:00 2001
From: Martin Sumner
Date: Mon, 3 Sep 2018 12:28:31 +0100
Subject: [PATCH 8/8] Change key() definition to not allow integer keys

---
 src/leveled_bookie.erl | 4 ++++
 src/leveled_codec.erl  | 7 +++----
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/leveled_bookie.erl b/src/leveled_bookie.erl
index b606b2f..901f6b8 100644
--- a/src/leveled_bookie.erl
+++ b/src/leveled_bookie.erl
@@ -163,6 +163,8 @@
 -type timing_types() :: head|get|put|fold.
 -type recent_aae() :: false|#recent_aae{}|undefined.
 -type key() :: binary()|string().
+    % Keys SHOULD be binary()
+    % string() support is a legacy of old tests
 -type open_options() ::
     %% For full description of options see ../docs/STARTUP_OPTIONS.md
     [{root_path, string()|undefined} |
@@ -279,6 +281,8 @@
         % Defaults to ?COMPRESSION_POINT
     ].
 
+-export_type([key/0]).
+
 %%%============================================================================
 %%% API
 
diff --git a/src/leveled_codec.erl b/src/leveled_codec.erl
index e90f2c4..e23f04c 100644
--- a/src/leveled_codec.erl
+++ b/src/leveled_codec.erl
@@ -837,16 +837,15 @@ get_metadata_from_siblings(<<...>>,
                                 MetaBin:MetaLen/binary>>,
                             [LastMod|LastMods]).
 
-
+-spec next_key(leveled_bookie:key()) -> leveled_bookie:key().
+%% @doc
+%% Get the next key to iterate from a given point
 next_key(Key) when is_binary(Key) ->
     <<Key/binary, 0>>;
-next_key(Key) when is_integer(Key) ->
-    Key + 1;
 next_key(Key) when is_list(Key) ->
     Key ++ [0].
 
-
 %%%============================================================================
 %%% Test
 %%%============================================================================
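
The `head_only` documentation added in PATCH 4 and the unit tests added in PATCH 1 and PATCH 5 together describe one usage pattern: start the bookie with a `head_only` option, load batches of object specifications with book_mput/2, and read results back through folds rather than individual GET requests. The sketch below strings those calls together for illustration only - it is not part of the patch series, it assumes the leveled application is on the code path, the bucket/key/value terms are invented for the example, and the atom h is used directly where the source uses the ?HEAD_TAG macro.

%% head_only_sketch.erl - illustrative only, following the API calls used in
%% is_empty_headonly_test/0 and foldkeys_headonly_tester above.
-module(head_only_sketch).
-export([run/1]).

run(RootPath) ->
    %% Start a head_only store; the Journal acts only as a short-term write buffer
    {ok, Bookie} = leveled_bookie:book_start([{root_path, RootPath},
                                                {max_journalsize, 1000000},
                                                {cache_size, 500},
                                                {head_only, no_lookup}]),
    %% Keys and values are loaded in batches of object specifications
    ObjSpecs =
        [{add, <<"B1">>, <<"K1">>, <<1:8/integer>>, 100},
            {add, <<"B1">>, <<"K2">>, <<2:8/integer>>, 101}],
    ok = leveled_bookie:book_mput(Bookie, ObjSpecs),
    %% h is the value of the ?HEAD_TAG macro
    false = leveled_bookie:book_isempty(Bookie, h),
    %% In no_lookup mode there are no HEAD requests - results come back
    %% through folds, such as the keylist runner used in the new tests
    FolderT =
        {keylist,
            h, <<"B1">>,
            {fun(_B, {K, SK}, Acc) -> [{K, SK}|Acc] end, []}},
    {async, Folder} = leveled_bookie:book_returnfolder(Bookie, FolderT),
    KeysAndSubKeys = lists:reverse(Folder()),
    ok = leveled_bookie:book_close(Bookie),
    KeysAndSubKeys.

Swapping no_lookup for with_lookup in the start options would additionally allow individual values to be fetched with HEAD requests, at the cost of the extra hash-based lookup work in the Penciller noted in the DESIGN.md change.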