2017-10-17 20:39:11 +01:00
|
|
|
%% -------- RUNNER ---------
|
|
|
|
%%
|
|
|
|
%% A bookie's runner would traditionally allow remote actors to place bets
|
|
|
|
%% via the runner. In this case the runner will allow a remote actor to
|
|
|
|
%% have query access to the ledger or journal. Runners provide a snapshot of
|
|
|
|
%% the book for querying the backend.
|
|
|
|
%%
|
|
|
|
%% Runners implement the {async, Folder} within Riak backends - returning an
|
2017-10-17 22:31:11 +01:00
|
|
|
%% {async, Runner}. Runner is just a function that provides access to a
|
|
|
|
%% snapshot of the database to allow for a particular query. The
|
2017-10-17 20:39:11 +01:00
|
|
|
%% Runner may make the snapshot at the point it is called, or the snapshot can
|
|
|
|
%% be generated and encapsulated within the function (known as snap_prefold).
|
|
|
|
%%
|
|
|
|
%% Runners which view only the Ledger (the Penciller view of the state) may
|
2017-10-17 22:31:11 +01:00
|
|
|
%% have a CheckPresence boolean - which causes the function to perform a basic
|
2017-10-17 20:39:11 +01:00
|
|
|
%% check that the item is available in the Journal via the Inker as part of
|
|
|
|
%% the fold. This may be useful for anti-entropy folds
|
|
|
|
|
|
|
|
|
|
|
|
-module(leveled_runner).
|
|
|
|
|
|
|
|
-include("include/leveled.hrl").
|
|
|
|
|
|
|
|
-export([
|
|
|
|
bucket_sizestats/3,
|
2018-09-01 10:39:23 +01:00
|
|
|
bucket_list/4,
|
|
|
|
bucket_list/5,
|
2017-10-17 20:39:11 +01:00
|
|
|
index_query/3,
|
|
|
|
bucketkey_query/4,
|
2018-09-21 12:04:32 +01:00
|
|
|
bucketkey_query/6,
|
2017-10-17 20:39:11 +01:00
|
|
|
hashlist_query/3,
|
|
|
|
tictactree/5,
|
2018-11-01 17:30:18 +00:00
|
|
|
foldheads_allkeys/7,
|
2017-11-17 14:54:53 +00:00
|
|
|
foldobjects_allkeys/4,
|
2018-10-31 00:09:24 +00:00
|
|
|
foldheads_bybucket/8,
|
2018-03-01 23:19:52 +00:00
|
|
|
foldobjects_bybucket/4,
|
2017-10-17 20:39:11 +01:00
|
|
|
foldobjects_byindex/3
|
|
|
|
]).
|
|
|
|
|
|
|
|
|
|
|
|
-include_lib("eunit/include/eunit.hrl").
|
|
|
|
|
|
|
|
-define(CHECKJOURNAL_PROB, 0.2).
|
|
|
|
|
2018-05-04 15:24:08 +01:00
|
|
|
-type key_range()
    :: {leveled_codec:ledger_key()|null,
        leveled_codec:ledger_key()|null}.
    % null at either end of the range leaves the range unbounded at that end

-type foldacc() :: any().
    % Can't currently be specific about what an acc might be

-type fold_objects_fun()
    :: fun((leveled_codec:key(), leveled_codec:key(), any(), foldacc())
            -> foldacc()).

-type fold_keys_fun()
    :: fun((leveled_codec:key(), leveled_codec:key(), foldacc())
            -> foldacc()).

-type fold_buckets_fun()
    :: fun((leveled_codec:key(), foldacc()) -> foldacc()).

-type fold_filter_fun()
    :: fun((leveled_codec:key(), leveled_codec:key()) -> accumulate|pass).

-type snap_fun()
    :: fun(() -> {ok, pid(), pid()|null}).
    % Returns {ok, LedgerSnapshot, JournalSnapshot}; the journal element is
    % null when no journal snapshot is provided

-type runner_fun()
    :: fun(() -> foldacc()).

-type acc_fun()
    :: fun((leveled_codec:key(), any(), foldacc()) -> foldacc()).
|
2018-03-19 19:47:19 +00:00
|
|
|
|
2018-10-31 00:09:24 +00:00
|
|
|
|
2017-10-17 20:39:11 +01:00
|
|
|
%%%============================================================================
|
|
|
|
%%% External functions
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec bucket_sizestats(snap_fun(), leveled_codec:key(), leveled_codec:tag())
        -> {async, runner_fun()}.
%% @doc
%% Fold over a bucket accumulating the count of objects and their total sizes.
%% The start and end ledger keys are both built with a null subkey, bounding
%% the fold to the single bucket.
bucket_sizestats(SnapFun, Bucket, Tag) ->
    StartKey = leveled_codec:to_ledgerkey(Bucket, null, Tag),
    EndKey = leveled_codec:to_ledgerkey(Bucket, null, Tag),
    SizeAccFun = accumulate_size(),
    Runner =
        fun() ->
            % Only the ledger snapshot is required; the journal element of
            % the snapshot is ignored
            {ok, PclSnap, _InkSnap} = SnapFun(),
            Result =
                leveled_penciller:pcl_fetchkeys(
                    PclSnap,
                    StartKey,
                    EndKey,
                    SizeAccFun,
                    {0, 0},
                    as_pcl),
            ok = leveled_penciller:pcl_close(PclSnap),
            Result
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec bucket_list(snap_fun(),
                    leveled_codec:tag(),
                    fold_buckets_fun(), foldacc()) -> {async, runner_fun()}.
%% @doc
%% List buckets for tag, assuming bucket names are all either binary, ascii
%% strings or integers.  Delegates to bucket_list/5 with no limit (-1) on
%% the number of buckets returned.
bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc) ->
    bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc, -1).
|
2018-03-21 15:31:00 +00:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec bucket_list(snap_fun(),
                    leveled_codec:tag(),
                    fold_buckets_fun(), foldacc(),
                    integer()) -> {async, runner_fun()}.
%% @doc
%% Set Max Buckets to -1 to list all buckets, otherwise will only return
%% MaxBuckets (use 1 to confirm that there exists any bucket for a given Tag).
bucket_list(SnapFun, Tag, FoldBucketsFun, InitAcc, MaxBuckets) ->
    Runner =
        fun() ->
            {ok, PclSnap, _InkSnap} = SnapFun(),
            % Walk the ledger bucket-by-bucket, skipping to the next bucket
            % as soon as one key has been seen for the current one
            FoundBuckets =
                get_nextbucket(
                    null, null, Tag, PclSnap, [], {0, MaxBuckets}),
            CloseFun =
                fun() ->
                    ok = leveled_penciller:pcl_close(PclSnap)
                end,
            ApplyFoldFun =
                fun() ->
                    % Buckets in reverse alphabetical order so foldr
                    lists:foldr(
                        fun({B, _K}, Acc) -> FoldBucketsFun(B, Acc) end,
                        InitAcc,
                        FoundBuckets)
                end,
            % For this fold, the fold over the store is actually completed
            % before results are passed to the FoldBucketsFun to be
            % accumulated.  Using a throw to exit the fold early will not
            % in this case save significant time.
            wrap_runner(ApplyFoldFun, CloseFun)
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec index_query(snap_fun(),
                    {leveled_codec:ledger_key(),
                        leveled_codec:ledger_key(),
                        {boolean(), undefined|re:mp()|iodata()}},
                    {fold_keys_fun(), foldacc()})
                -> {async, runner_fun()}.
%% @doc
%% Secondary index query
%% This has the special capability that it will expect a message to be thrown
%% during the query - and handle this without crashing the penciller snapshot
%% This allows for this query to be used with a max_results check in the
%% application - and to throw a stop message to be caught by the worker
%% handling the runner.  This behaviour will not prevent the snapshot from
%% closing neatly, allowing delete_pending files to be cleared without waiting
%% for a timeout
index_query(SnapFun, {StartKey, EndKey, TermHandling}, FoldAccT) ->
    {FoldKeysFun, InitAcc} = FoldAccT,
    {ReturnTerms, TermRegex} = TermHandling,
    % When terms are to be returned, each result is the {Term, Key} pair;
    % otherwise only the object key is accumulated
    AddFun =
        case ReturnTerms of
            true -> fun add_terms/2;
            _ -> fun add_keys/2
        end,
    AccFun = accumulate_index(TermRegex, AddFun, FoldKeysFun),
    Runner =
        fun() ->
            {ok, PclSnap, _InkSnap} = SnapFun(),
            % by_runner defers the fold until the returned fun is applied
            DeferredFold =
                leveled_penciller:pcl_fetchkeys(
                    PclSnap,
                    StartKey,
                    EndKey,
                    AccFun,
                    InitAcc,
                    by_runner),
            CloseFun =
                fun() ->
                    ok = leveled_penciller:pcl_close(PclSnap)
                end,
            wrap_runner(DeferredFold, CloseFun)
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec bucketkey_query(snap_fun(),
                        leveled_codec:tag(),
                        leveled_codec:key()|null,
                        key_range(),
                        {fold_keys_fun(), foldacc()},
                        leveled_codec:regular_expression())
                    -> {async, runner_fun()}.
%% @doc
%% Fold over all keys in `KeyRange' under tag (restricted to a given bucket),
%% optionally filtering keys against a regular expression.
bucketkey_query(SnapFun, Tag, Bucket,
                    {StartKey, EndKey},
                    {FoldKeysFun, InitAcc},
                    TermRegex) ->
    SK = leveled_codec:to_ledgerkey(Bucket, StartKey, Tag),
    EK = leveled_codec:to_ledgerkey(Bucket, EndKey, Tag),
    AccFun = accumulate_keys(FoldKeysFun, TermRegex),
    Runner =
        fun() ->
            {ok, PclSnap, _InkSnap} = SnapFun(),
            % by_runner defers the fold until the returned fun is applied
            DeferredFold =
                leveled_penciller:pcl_fetchkeys(
                    PclSnap, SK, EK, AccFun, InitAcc, by_runner),
            CloseFun =
                fun() ->
                    ok = leveled_penciller:pcl_close(PclSnap)
                end,
            % wrap_runner ensures the snapshot is closed even if the fold
            % exits early via throw
            wrap_runner(DeferredFold, CloseFun)
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec bucketkey_query(snap_fun(),
                        leveled_codec:tag(),
                        leveled_codec:key()|null,
                        {fold_keys_fun(), foldacc()}) -> {async, runner_fun()}.
%% @doc
%% Fold over all keys under tag (potentially restricted to a given bucket).
%% Convenience form of bucketkey_query/6 with an unbounded key range and no
%% regular expression filter.
bucketkey_query(SnapFun, Tag, Bucket, FunAcc) ->
    bucketkey_query(SnapFun, Tag, Bucket, {null, null}, FunAcc, undefined).
|
2018-03-19 19:47:19 +00:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec hashlist_query(snap_fun(),
                        leveled_codec:tag(),
                        boolean()) -> {async, runner_fun()}.
%% @doc
%% Fold over the keys under a given Tag accumulating the hashes.  When
%% JournalCheck is true a journal snapshot is also used to (probabilistically)
%% confirm presence, and that snapshot is closed when the fold completes.
hashlist_query(SnapFun, Tag, JournalCheck) ->
    StartKey = leveled_codec:to_ledgerkey(null, null, Tag),
    EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
    Runner =
        fun() ->
            {ok, PclSnap, InkSnap} = SnapFun(),
            AccFun = accumulate_hashes(JournalCheck, InkSnap),
            HashList =
                leveled_penciller:pcl_fetchkeys(
                    PclSnap, StartKey, EndKey, AccFun, []),
            ok = leveled_penciller:pcl_close(PclSnap),
            case JournalCheck of
                true ->
                    leveled_inker:ink_close(InkSnap);
                false ->
                    ok
            end,
            HashList
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec tictactree(snap_fun(),
                    {leveled_codec:tag(), leveled_codec:key(), tuple()},
                    boolean(), atom(), fold_filter_fun())
                -> {async, runner_fun()}.
%% @doc
%% Return a merkle tree from the fold, directly accessing hashes cached in the
%% metadata.
%% Journal check can be used for object key folds to confirm that the
%% object is still indexed within the journal.
tictactree(SnapFun, {Tag, Bucket, Query}, JournalCheck, TreeSize, Filter) ->
    Tree = leveled_tictac:new_tree(temp, TreeSize),
    Runner =
        fun() ->
            {ok, PclSnap, InkSnap} = SnapFun(),
            % Keys must be binaries before being added to the tree; wrap
            % non-binary keys with term_to_binary
            EnsureKeyBinaryFun =
                fun(K, T) ->
                    case is_binary(K) of
                        true -> {K, T};
                        false -> {term_to_binary(K), T}
                    end
                end,
            % The start key and end key will vary depending on whether the
            % fold is to fold over an index or a key range
            {StartKey, EndKey, ExtractFun} =
                case Tag of
                    ?IDX_TAG ->
                        {IdxFld, StartIdx, EndIdx} = Query,
                        KeyDefFun = fun leveled_codec:to_ledgerkey/5,
                        {KeyDefFun(Bucket, null, ?IDX_TAG, IdxFld, StartIdx),
                            KeyDefFun(Bucket, null, ?IDX_TAG, IdxFld, EndIdx),
                            EnsureKeyBinaryFun};
                    _ ->
                        {StartOKey, EndOKey} = Query,
                        {leveled_codec:to_ledgerkey(Bucket, StartOKey, Tag),
                            leveled_codec:to_ledgerkey(Bucket, EndOKey, Tag),
                            fun(K, H) ->
                                V = {is_hash, H},
                                EnsureKeyBinaryFun(K, V)
                            end}
                end,
            AccFun =
                accumulate_tree(Filter, JournalCheck, InkSnap, ExtractFun),
            Acc =
                leveled_penciller:pcl_fetchkeys(
                    PclSnap, StartKey, EndKey, AccFun, Tree),
            % Close down snapshot when complete so as not to hold removed
            % files open
            ok = leveled_penciller:pcl_close(PclSnap),
            case JournalCheck of
                true ->
                    leveled_inker:ink_close(InkSnap);
                false ->
                    ok
            end,
            Acc
        end,
    {async, Runner}.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldheads_allkeys(snap_fun(), leveled_codec:tag(),
                        fold_objects_fun()|{fold_objects_fun(), foldacc()},
                        boolean(), false|list(integer()),
                        false|leveled_codec:lastmod_range(),
                        false|pos_integer()) -> {async, runner_fun()}.
%% @doc
%% Fold over all heads in the store for a given tag - applying the passed
%% function to each proxy object.  The fold is a deferred-fetch fold
%% ({true, JournalCheck}), so object bodies are not fetched.
foldheads_allkeys(SnapFun, Tag, FoldFun, JournalCheck,
                    SegmentList, LastModRange, MaxObjectCount) ->
    StartKey = leveled_codec:to_ledgerkey(null, null, Tag),
    EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
    foldobjects(SnapFun,
                Tag,
                [{StartKey, EndKey}],
                FoldFun,
                {true, JournalCheck},
                SegmentList,
                LastModRange,
                MaxObjectCount).
|
2017-10-17 20:39:11 +01:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldobjects_allkeys(snap_fun(),
                            leveled_codec:tag(),
                            fold_objects_fun()|{fold_objects_fun(), foldacc()},
                            key_order|sqn_order)
                        -> {async, runner_fun()}.
%% @doc
%% Fold over all objects for a given tag.  In key_order the ledger is used to
%% drive the fold; in sqn_order the journal is folded in order of receipt,
%% with each batch validated against the ledger before being accumulated.
foldobjects_allkeys(SnapFun, Tag, FoldFun, key_order) ->
    StartKey = leveled_codec:to_ledgerkey(null, null, Tag),
    EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
    foldobjects(SnapFun,
                Tag,
                [{StartKey, EndKey}],
                FoldFun,
                false,
                false);
foldobjects_allkeys(SnapFun, Tag, FoldObjectsFun, sqn_order) ->
    % Fold over the journal in order of receipt
    {FoldFun, InitAcc} =
        case is_tuple(FoldObjectsFun) of
            true ->
                % FoldObjectsFun is already a tuple with a Fold function and
                % an initial accumulator
                FoldObjectsFun;
            false ->
                % no initial accumulator passed, and so should be just a list
                {FoldObjectsFun, []}
        end,

    % Decide per journal entry whether to accumulate it into the current
    % batch, and whether the journal fold should continue
    FilterFun =
        fun(JKey, JVal, _Pos, Acc, ExtractFun) ->
            {SQN, InkTag, LedgerKey} = JKey,
            case {InkTag, leveled_codec:from_ledgerkey(Tag, LedgerKey)} of
                {?INKT_STND, {B, K}} ->
                    % Ignore tombstones and non-matching Tags and Key changes
                    % objects.
                    {MinSQN, MaxSQN, BatchAcc} = Acc,
                    case SQN of
                        SQN when SQN < MinSQN ->
                            {loop, Acc};
                        SQN when SQN > MaxSQN ->
                            {stop, Acc};
                        _ ->
                            {VBin, _VSize} = ExtractFun(JVal),
                            {Obj, _IdxSpecs} =
                                leveled_codec:split_inkvalue(VBin),
                            ToLoop =
                                case SQN of
                                    MaxSQN -> stop;
                                    _ -> loop
                                end,
                            {ToLoop,
                                {MinSQN, MaxSQN, [{B, K, SQN, Obj}|BatchAcc]}}
                    end;
                _ ->
                    {loop, Acc}
            end
        end,

    InitAccFun = fun(_FN, _SQN) -> [] end,

    Folder =
        fun() ->
            {ok, PclSnap, InkSnap} = SnapFun(),
            {ok, JournalSQN} = leveled_inker:ink_getjournalsqn(InkSnap),
            IsValidFun =
                fun(Bucket, Key, SQN) ->
                    LedgerKey = leveled_codec:to_ledgerkey(Bucket, Key, Tag),
                    CheckSQN =
                        leveled_penciller:pcl_checksequencenumber(
                            PclSnap, LedgerKey, SQN),
                    % Need to check that we have not folded past the point
                    % at which the snapshot was taken
                    (JournalSQN >= SQN) and (CheckSQN == current)
                end,

            % Apply the user fold to a batch, dropping any entry which is no
            % longer current in the ledger
            BatchFoldFun =
                fun(BatchAcc, ObjAcc) ->
                    ObjFun =
                        fun({B, K, SQN, Obj}, Acc) ->
                            case IsValidFun(B, K, SQN) of
                                true ->
                                    FoldFun(B, K, Obj, Acc);
                                false ->
                                    Acc
                            end
                        end,
                    leveled_log:log("R0001", [length(BatchAcc)]),
                    % Batches were accumulated head-first, so foldr to
                    % process in journal order
                    lists:foldr(ObjFun, ObjAcc, BatchAcc)
                end,

            InkFolder =
                leveled_inker:ink_fold(
                    InkSnap,
                    0,
                    {FilterFun, InitAccFun, BatchFoldFun},
                    InitAcc),
            CloseFun =
                fun() ->
                    ok = leveled_penciller:pcl_close(PclSnap),
                    ok = leveled_inker:ink_close(InkSnap)
                end,
            wrap_runner(InkFolder, CloseFun)
        end,
    {async, Folder}.
|
|
|
|
|
2017-10-17 20:39:11 +01:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldobjects_bybucket(snap_fun(),
                            leveled_codec:tag(),
                            list(key_range()),
                            fold_objects_fun()|{fold_objects_fun(), foldacc()})
                        -> {async, runner_fun()}.
%% @doc
%% Fold over all objects within a given key range in a bucket.  This is a
%% full-object fold (DeferredFetch is false) with no segment filtering.
foldobjects_bybucket(SnapFun, Tag, KeyRanges, FoldFun) ->
    foldobjects(SnapFun,
                Tag,
                KeyRanges,
                FoldFun,
                false,
                false).
|
2017-10-17 20:39:11 +01:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldheads_bybucket(snap_fun(),
                            leveled_codec:tag(),
                            list(key_range()),
                            fold_objects_fun()|{fold_objects_fun(), foldacc()},
                            boolean(),
                            false|list(integer()),
                            false|leveled_codec:lastmod_range(),
                            false|pos_integer())
                        -> {async, runner_fun()}.
%% @doc
%% Fold over all object metadata within a given key range in a bucket.  A
%% deferred-fetch fold ({true, JournalCheck}) - proxy objects are passed to
%% the fold function rather than full object bodies.
foldheads_bybucket(SnapFun,
                    Tag,
                    KeyRanges,
                    FoldFun,
                    JournalCheck,
                    SegmentList, LastModRange, MaxObjectCount) ->
    foldobjects(SnapFun,
                Tag,
                KeyRanges,
                FoldFun,
                {true, JournalCheck},
                SegmentList,
                LastModRange,
                MaxObjectCount).
|
2017-10-17 20:39:11 +01:00
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldobjects_byindex(snap_fun(),
                            tuple(),
                            fold_objects_fun()|{fold_objects_fun(), foldacc()})
                        -> {async, runner_fun()}.
%% @doc
%% Folds over an index, fetching the objects associated with the keys returned
%% and passing those objects into the fold function.
foldobjects_byindex(SnapFun, {Tag, Bucket, Field, FromTerm, ToTerm}, FoldFun) ->
    StartKey =
        leveled_codec:to_ledgerkey(Bucket, null, ?IDX_TAG, Field, FromTerm),
    EndKey =
        leveled_codec:to_ledgerkey(Bucket, null, ?IDX_TAG, Field, ToTerm),
    foldobjects(SnapFun,
                Tag,
                [{StartKey, EndKey}],
                FoldFun,
                false,
                false).
|
2017-10-17 20:39:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
%%%============================================================================
|
|
|
|
%%% Internal functions
|
|
|
|
%%%============================================================================
|
|
|
|
|
2018-03-21 15:31:00 +00:00
|
|
|
%% Walk the ledger one bucket at a time, fetching only the first key of each
%% bucket, until either no further buckets exist or the bucket limit is hit.
%% The limit pair is {Count, Limit}; a Limit of -1 never matches Count and so
%% lists all buckets.
%% NOTE(review): the limit-reached clause reverses the accumulator while the
%% buckets-exhausted clause does not, so the two exit paths return opposite
%% orderings - confirm this asymmetry is intended by callers using foldr.
get_nextbucket(_NextB, _NextK, _Tag, _LS, BKList, {Limit, Limit}) ->
    lists:reverse(BKList);
get_nextbucket(NextBucket, NextKey, Tag, LedgerSnapshot, BKList, {C, L}) ->
    StartKey = leveled_codec:to_ledgerkey(NextBucket, NextKey, Tag),
    EndKey = leveled_codec:to_ledgerkey(null, null, Tag),
    ExtractFun =
        fun(LK, V, _Acc) -> {leveled_codec:from_ledgerkey(LK), V} end,
    R =
        leveled_penciller:pcl_fetchnextkey(
            LedgerSnapshot, StartKey, EndKey, ExtractFun, null),
    case R of
        {1, null} ->
            % No further keys - all buckets have been seen
            leveled_log:log("B0008", []),
            BKList;
        {0, {{B, K}, _V}} ->
            % Found a key in bucket B; record it and skip to the first key
            % beyond bucket B
            leveled_log:log("B0009", [B]),
            get_nextbucket(
                leveled_codec:next_key(B),
                null,
                Tag,
                LedgerSnapshot,
                [{B, K}|BKList],
                {C + 1, L})
    end.
|
|
|
|
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldobjects(snap_fun(),
                    atom(),
                    list(),
                    fold_objects_fun()|{fold_objects_fun(), foldacc()},
                    false|{true, boolean()}, false|list(integer())) ->
                        {async, runner_fun()}.
%% Convenience form of foldobjects/8 with no last-modified range and no
%% object count limit.
foldobjects(SnapFun, Tag, KeyRanges, FoldObjFun, DeferredFetch, SegmentList) ->
    foldobjects(SnapFun, Tag, KeyRanges,
                FoldObjFun, DeferredFetch, SegmentList, false, false).
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec foldobjects(snap_fun(), atom(), list(),
                    fold_objects_fun()|{fold_objects_fun(), foldacc()},
                    false|{true, boolean()},
                    false|list(integer()),
                    false|leveled_codec:lastmod_range(),
                    false|pos_integer()) -> {async, runner_fun()}.
%% @doc
%% The object folder should be passed DeferredFetch.
%% DeferredFetch can either be false (which will return to the fold function
%% the full object), or {true, CheckPresence} - in which case a proxy object
%% will be created that if understood by the fold function will allow the fold
%% function to work on the head of the object, and defer fetching the body in
%% case such a fetch is unnecessary.
foldobjects(SnapFun, Tag, KeyRanges, FoldObjFun, DeferredFetch,
                SegmentList, LastModRange, MaxObjectCount) ->
    {FoldFun, InitAcc} =
        case is_tuple(FoldObjFun) of
            true ->
                % FoldObjectsFun is already a tuple with a Fold function and
                % an initial accumulator
                FoldObjFun;
            false ->
                % no initial accumulator passed, and so should be just a list
                {FoldObjFun, []}
        end,
    % When an object count limit applies, the accumulator is wrapped with the
    % remaining count so the penciller can stop the fold early
    {LimitByCount, InitAcc0} =
        case MaxObjectCount of
            false ->
                {false, InitAcc};
            MOC when is_integer(MOC) ->
                {true, {MOC, InitAcc}}
        end,

    Folder =
        fun() ->
            {ok, PclSnap, InkSnap} = SnapFun(),
            AccFun =
                accumulate_objects(FoldFun, InkSnap, Tag, DeferredFetch),
            % Produces a deferred fold (a fun) for one key range
            RangeFolderGen =
                fun({StartKey, EndKey}, FoldAcc) ->
                    leveled_penciller:pcl_fetchkeysbysegment(
                        PclSnap,
                        StartKey,
                        EndKey,
                        AccFun,
                        FoldAcc,
                        SegmentList,
                        LastModRange,
                        LimitByCount)
                end,
            CloseFun =
                fun() ->
                    ok = leveled_penciller:pcl_close(PclSnap),
                    case DeferredFetch of
                        {true, false} ->
                            % Head-only fold with no presence check - no
                            % journal snapshot was taken
                            ok;
                        _ ->
                            ok = leveled_inker:ink_close(InkSnap)
                    end
                end,
            RangeFoldFun =
                fun(KeyRange, Acc) ->
                    RangeFolder = RangeFolderGen(KeyRange, Acc),
                    RangeFolder()
                end,
            FolderToWrap =
                fun() -> lists:foldl(RangeFoldFun, InitAcc0, KeyRanges) end,
            wrap_runner(FolderToWrap, CloseFun)
        end,
    {async, Folder}.
|
|
|
|
|
|
|
|
|
|
|
|
%% Build an accumulator fun which sums object sizes and counts objects,
%% threading a {TotalSize, Count} pair through the fold.
accumulate_size() ->
    fun(Key, Value, {Size, Count}) ->
        {Size + leveled_codec:get_size(Key, Value), Count + 1}
    end.
|
|
|
|
|
|
|
|
%% Build an accumulator fun which collects {Bucket, Key, Hash} triples into a
%% list, optionally (and probabilistically) checking journal presence first.
accumulate_hashes(JournalCheck, InkerClone) ->
    AddKeyFun =
        fun(B, K, H, Acc) -> [{B, K, H}|Acc] end,
    get_hashaccumulator(JournalCheck, InkerClone, AddKeyFun).
|
|
|
|
|
|
|
|
%% Build an accumulator fun which adds each key/hash into a tictac tree,
%% subject to the FilterFun returning accumulate (pass leaves the tree
%% unchanged).  Presence may be probabilistically checked in the journal.
accumulate_tree(FilterFun, JournalCheck, InkerClone, HashFun) ->
    AddKeyFun =
        fun(B, K, H, Tree) ->
            case FilterFun(B, K) of
                accumulate ->
                    leveled_tictac:add_kv(Tree, K, H, HashFun);
                pass ->
                    Tree
            end
        end,
    get_hashaccumulator(JournalCheck, InkerClone, AddKeyFun).
|
|
|
|
|
|
|
|
%% Shared accumulator for hash folds.  When JournalCheck is true, a random
%% sample of keys (?CHECKJOURNAL_PROB of them) is checked for presence in the
%% journal before being accumulated; keys missing from the journal are
%% dropped.
get_hashaccumulator(JournalCheck, InkerClone, AddKeyFun) ->
    fun(LK, V, Acc) ->
        {B, K, H} = leveled_codec:get_keyandobjhash(LK, V),
        Check = leveled_rand:uniform() < ?CHECKJOURNAL_PROB,
        case JournalCheck and Check of
            true ->
                case check_presence(LK, V, InkerClone) of
                    true ->
                        AddKeyFun(B, K, H, Acc);
                    false ->
                        Acc
                end;
            _ ->
                AddKeyFun(B, K, H, Acc)
        end
    end.
|
|
|
|
|
2020-12-04 19:40:28 +00:00
|
|
|
-spec accumulate_objects(fold_objects_fun(),
                            pid()|null,
                            leveled_codec:tag(),
                            false|{true, boolean()})
                        -> acc_fun().
%% Build an accumulator fun over ledger entries.  The fun takes the Ledger
%% Key and the value from the ledger (with the value being the object
%% metadata).
%% If this is a deferred_fetch (i.e. the fold is a fold_heads not a
%% fold_objects), then a metadata proxy object is built to be passed to the
%% fold function - optionally after a quick check that the Key is present in
%% the Journal.  Otherwise the full object is fetched from the journal, with
%% entries no longer present being skipped.
accumulate_objects(FoldObjectsFun, InkerClone, Tag, DeferredFetch) ->
    fun(LK, V, Acc) ->
        {SQN, _St, _MH, MD} =
            leveled_codec:striphead_to_v1details(V),
        % from_ledgerkey may return a 2-tuple or (for typed buckets) a
        % 3-tuple; only Bucket and Key are needed here
        {B, K} =
            case leveled_codec:from_ledgerkey(LK) of
                {B0, K0} ->
                    {B0, K0};
                {B0, K0, _T0} ->
                    {B0, K0}
            end,
        JK = {leveled_codec:to_ledgerkey(B, K, Tag), SQN},
        case DeferredFetch of
            {true, JournalCheck} ->
                ProxyObj =
                    leveled_codec:return_proxy(Tag, MD, InkerClone, JK),
                case JournalCheck of
                    true ->
                        % Cheap key check before accumulating the proxy -
                        % missing keys are dropped from the fold
                        InJournal =
                            leveled_inker:ink_keycheck(InkerClone, LK, SQN),
                        case InJournal of
                            probably ->
                                FoldObjectsFun(B, K, ProxyObj, Acc);
                            missing ->
                                Acc
                        end;
                    false ->
                        FoldObjectsFun(B, K, ProxyObj, Acc)
                end;
            false ->
                % Full object fold - fetch the body from the journal
                case leveled_bookie:fetch_value(InkerClone, JK) of
                    not_present ->
                        Acc;
                    Value ->
                        FoldObjectsFun(B, K, Value, Acc)
                end
        end
    end.
|
|
|
|
|
2018-02-15 16:14:46 +00:00
|
|
|
|
2017-10-17 20:39:11 +01:00
|
|
|
%% Confirm (probabilistically) that the key at this sequence number is still
%% present in the journal; ink_keycheck returns probably|missing.
check_presence(Key, Value, InkerClone) ->
    {LedgerKey, SQN} = leveled_codec:strip_to_keyseqonly({Key, Value}),
    case leveled_inker:ink_keycheck(InkerClone, LedgerKey, SQN) of
        probably ->
            true;
        missing ->
            false
    end.
|
|
|
|
|
2018-09-21 12:04:32 +01:00
|
|
|
%% Build an accumulator fun over ledger keys, applying FoldKeysFun to each
%% {Bucket, Key}.  When TermRegex is not undefined, only keys matching the
%% regular expression are passed on.
accumulate_keys(FoldKeysFun, TermRegex) ->
    fun(Key, _Value, Acc) ->
        {B, K} = leveled_codec:from_ledgerkey(Key),
        case TermRegex of
            undefined ->
                FoldKeysFun(B, K, Acc);
            Re ->
                case re:run(K, Re) of
                    nomatch ->
                        Acc;
                    _ ->
                        FoldKeysFun(B, K, Acc)
                end
        end
    end.
|
|
|
|
|
|
|
|
%% Accumulation shape for index queries when terms are not returned: just
%% the object key.
add_keys(ObjKey, _IdxValue) -> ObjKey.

%% Accumulation shape for index queries when terms are returned: the
%% {IndexTerm, ObjectKey} pair.
add_terms(ObjKey, IdxValue) -> {IdxValue, ObjKey}.
|
|
|
|
|
|
|
|
%% Build an accumulator fun for secondary-index folds.  AddFun shapes each
%% result (key only, or {term, key}); when TermRe is a regular expression the
%% index term must match it for the entry to be accumulated.
accumulate_index(TermRe, AddFun, FoldKeysFun) ->
    case TermRe of
        undefined ->
            fun(Key, _Value, Acc) ->
                {Bucket, ObjKey, IdxValue} =
                    leveled_codec:from_ledgerkey(Key),
                FoldKeysFun(Bucket, AddFun(ObjKey, IdxValue), Acc)
            end;
        _ ->
            fun(Key, _Value, Acc) ->
                {Bucket, ObjKey, IdxValue} =
                    leveled_codec:from_ledgerkey(Key),
                case re:run(IdxValue, TermRe) of
                    nomatch ->
                        Acc;
                    _ ->
                        FoldKeysFun(Bucket, AddFun(ObjKey, IdxValue), Acc)
                end
            end
    end.
|
|
|
|
|
2018-09-24 19:54:28 +01:00
|
|
|
-spec wrap_runner(fun(), fun()) -> any().
%% @doc
%% Allow things to be thrown in folds, and ensure clean-up action is still
%% undertaken if they are.
%%
%% It is assumed this is only used at present by index queries and key folds,
%% but the wrap could be applied more generally with further work.
wrap_runner(FoldAction, AfterAction) ->
    try
        FoldAction()
    catch
        % Re-throw so the caller can still catch the fold's stop signal;
        % the after clause guarantees AfterAction runs first
        throw:Throw ->
            throw(Throw)
    after
        AfterAction()
    end.
|
|
|
|
|
|
|
|
|
2017-10-17 20:39:11 +01:00
|
|
|
%%%============================================================================
|
|
|
|
%%% Test
|
|
|
|
%%%============================================================================
|
|
|
|
|
|
|
|
-ifdef(TEST).
|
|
|
|
|
2018-09-24 19:54:28 +01:00
|
|
|
%% Confirm wrap_runner both passes through a completed fold's result and
%% re-throws a fold's throw (running the after-action in either case).
throw_test() ->
    StoppedFolder = fun() -> throw(stop_fold) end,
    CompletedFolder = fun() -> {ok, ['1']} end,
    AfterAction = fun() -> error end,
    ?assertMatch({ok, ['1']},
                    wrap_runner(CompletedFolder, AfterAction)),
    ?assertException(throw, stop_fold,
                    wrap_runner(StoppedFolder, AfterAction)).
|
2017-10-17 20:39:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
-endif.
|
|
|
|
|
|
|
|
|
2018-03-19 19:47:19 +00:00
|
|
|
|