merge new_tests branch

alisdair sullivan 2013-02-24 03:02:16 -08:00
commit 2e5bb4fd3b
94 changed files with 2922 additions and 4822 deletions

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View file

@ -1 +0,0 @@
["foo","bar", "baz",[true],[false],[null],true, false, null, 0.7, {"key":"value"},[{}, null,null,null,[]],"\n\r\\", [-1]]

View file

@ -1,29 +0,0 @@
{name, "array"}.
{jsx, [start_array,
{string,<<"foo">>},
{string,<<"bar">>},
{string,<<"baz">>},
start_array,
{literal,true},
end_array,start_array,
{literal,false},
end_array,start_array,
{literal,null},
end_array,
{literal,true},
{literal,false},
{literal,null},
{float,0.7},
start_object,
{key,<<"key">>},
{string,<<"value">>},
end_object,start_array,start_object,end_object,
{literal,null},
{literal,null},
{literal,null},
start_array,end_array,end_array,
{string,<<"\n\r\\">>},
start_array,
{integer,-1},
end_array,end_array,end_json]}.
{json, "array.json"}.

View file

@ -1 +0,0 @@
["\ud801\u0032"]

View file

@ -1,3 +0,0 @@
{name, "bad_low_surrogate"}.
{jsx, {error, badarg}}.
{json, "bad_low_surrogate.json"}.

View file

@ -1 +0,0 @@
["\ud801\u0032"]

View file

@ -1,4 +0,0 @@
{name, "bad_low_surrogate_replaced"}.
{jsx, [start_array,{string, <<16#fffd/utf8, 16#fffd/utf8>>},end_array,end_json]}.
{json, "bad_low_surrogate_replaced.json"}.
{jsx_flags, [replaced_bad_utf8]}.

View file

@ -1 +0,0 @@
1 1

View file

@ -1,3 +0,0 @@
{name, "bad naked number"}.
{jsx, {error, badarg}}.
{json, "bad_naked_number.json"}.

View file

@ -1 +0,0 @@
[]

View file

@ -1,3 +0,0 @@
{name, "byte order mark"}.
{jsx, [start_array, end_array, end_json]}.
{json, "bom.json"}.

View file

@ -1,10 +0,0 @@
// comment
{ // comment
"key" // comment
: // comment
[ // comment
true // comment
, // comment
false // comment
] // comment
} // comment

View file

@ -1,4 +0,0 @@
{name, "comment_style_a"}.
{jsx, [start_object,{key, <<"key">>}, start_array, {literal, true}, {literal, false}, end_array, end_object,end_json]}.
{json, "comment_style_a.json"}.
{jsx_flags, [comments]}.

View file

@ -1 +0,0 @@
/* comment */ { /* comment */ "key" /* comment */ : /* comment */ [ /* comment */ true /* comment */ , /* comment */ false /* comment */ ] /* comment */ } /* comment */

View file

@ -1,4 +0,0 @@
{name, "comment_style_a"}.
{jsx, [start_object,{key, <<"key">>}, start_array, {literal, true}, {literal, false}, end_array, end_object,end_json]}.
{json, "comment_style_a.json"}.
{jsx_flags, [comments]}.

View file

@ -1 +0,0 @@
[[[]]]

View file

@ -1,3 +0,0 @@
{name, "deep array"}.
{jsx, [start_array,start_array,start_array,end_array,end_array,end_array,end_json]}.
{json, "deep_array.json"}.

View file

@ -1 +0,0 @@
[]

View file

@ -1,3 +0,0 @@
{name, "empty_array"}.
{jsx, [start_array,end_array,end_json]}.
{json, "empty_array.json"}.

View file

@ -1 +0,0 @@
{}

View file

@ -1,3 +0,0 @@
{name, "empty_object"}.
{jsx, [start_object,end_object,end_json]}.
{json, "empty_object.json"}.

View file

@ -1 +0,0 @@
[{}]

View file

@ -1,3 +0,0 @@
{name, "empty_object_in_array"}.
{jsx, [start_array,start_object,end_object,end_array,end_json]}.
{json, "empty_object_in_array.json"}.

View file

@ -1 +0,0 @@
""

View file

@ -1,3 +0,0 @@
{name, "empty_string"}.
{jsx, [{string, <<>>},end_json]}.
{json, "empty_string.json"}.

View file

@ -1 +0,0 @@
["\ud801\udc00"]

View file

@ -1,3 +0,0 @@
{name, "encoded_surrogates"}.
{jsx, [start_array,{string,<<66560/utf8>>},end_array,end_json]}.
{json, "encoded_surrogates.json"}.

View file

@ -1 +0,0 @@
"\u0012"

View file

@ -1,3 +0,0 @@
{name, "escaped_control"}.
{jsx, [{string, <<18>>},end_json]}.
{json, "escaped_control.json"}.

View file

@ -1 +0,0 @@
[ "\"\\\/\b\f\n\r\t\u0020" ]

View file

@ -1,5 +0,0 @@
{name, "escapes"}.
{jsx, [start_array,
{string,<<"\"\\/\b\f\n\r\t ">>},
end_array,end_json]}.
{json, "escapes.json"}.

View file

@ -1 +0,0 @@
[[2.0e7], 2.0e7, {"key":2.0e7, "another key":2.0E7}, 4.2e70 ]

View file

@ -1,14 +0,0 @@
{name, "exp"}.
{jsx, [start_array,start_array,
{float,2.0e7},
end_array,
{float,2.0e7},
start_object,
{key,<<"key">>},
{float,2.0e7},
{key,<<"another key">>},
{float,2.0e7},
end_object,
{float,4.2e70},
end_array,end_json]}.
{json, "exp.json"}.

View file

@ -1 +0,0 @@
false

View file

@ -1,3 +0,0 @@
{name, "false"}.
{jsx, [{literal, false},end_json]}.
{json, "false.json"}.

View file

@ -1 +0,0 @@
[[2.0], 2.0, {"key":2.0e7, "another key":2.0E7}, {"key":2.0, "another key":2.0}, 4.321 ]

View file

@ -1,19 +0,0 @@
{name, "fraction"}.
{jsx, [start_array,start_array,
{float,2.0},
end_array,
{float,2.0},
start_object,
{key,<<"key">>},
{float,2.0e7},
{key,<<"another key">>},
{float,2.0e7},
end_object,start_object,
{key,<<"key">>},
{float,2.0},
{key,<<"another key">>},
{float,2.0},
end_object,
{float,4.321},
end_array,end_json]}.
{json, "fraction.json"}.

View file

@ -1 +0,0 @@
[[20], 20, {"key":20, "another key":20}, 42 ]

View file

@ -1,14 +0,0 @@
{name, "integer"}.
{jsx, [start_array,start_array,
{integer,20},
end_array,
{integer,20},
start_object,
{key,<<"key">>},
{integer,20},
{key,<<"another key">>},
{integer,20},
end_object,
{integer,42},
end_array,end_json]}.
{json, "integer.json"}.

View file

@ -1 +0,0 @@
[ " 𝄞 " ]

View file

@ -1,3 +0,0 @@
{name, "multibyte_utf"}.
{jsx, [start_array,{string,<<32,119070/utf8,32>>},end_array,end_json]}.
{json, "multibyte_utf.json"}.

View file

@ -1 +0,0 @@
42

View file

@ -1,3 +0,0 @@
{name, "naked_number_a"}.
{jsx, [{integer,42},end_json]}.
{json, "naked_number_a.json"}.

View file

@ -1 +0,0 @@
-42

View file

@ -1,3 +0,0 @@
{name, "naked_number_b"}.
{jsx, [{integer,-42},end_json]}.
{json, "naked_number_b.json"}.

View file

@ -1 +0,0 @@
-0.7

View file

@ -1,3 +0,0 @@
{name, "naked_number_c"}.
{jsx, [{float,-0.7},end_json]}.
{json, "naked_number_c.json"}.

View file

@ -1 +0,0 @@
0.7

View file

@ -1,3 +0,0 @@
{name, "naked_number_d"}.
{jsx, [{float,0.7},end_json]}.
{json, "naked_number_d.json"}.

View file

@ -1 +0,0 @@
0

View file

@ -1,3 +0,0 @@
{name, "naked_number_e"}.
{jsx, [{integer,0},end_json]}.
{json, "naked_number_e.json"}.

View file

@ -1 +0,0 @@
1e100

View file

@ -1,3 +0,0 @@
{name, "naked_number_f"}.
{jsx, [{float,1.0e100},end_json]}.
{json, "naked_number_f.json"}.

View file

@ -1 +0,0 @@
7

View file

@ -1,3 +0,0 @@
{name, "naked_number_g"}.
{jsx, [{integer,7},end_json]}.
{json, "naked_number_g.json"}.

View file

@ -1 +0,0 @@
"this is a naked string"

View file

@ -1,3 +0,0 @@
{name, "naked_string"}.
{jsx, [{string,<<"this is a naked string">>},end_json]}.
{json, "naked_string.json"}.

View file

@ -1 +0,0 @@
[[-0], -0, {"key":-0, "another key":-0}, -0 ]

View file

@ -1,14 +0,0 @@
{name, "negative_zero"}.
{jsx, [start_array,start_array,
{integer,0},
end_array,
{integer,0},
start_object,
{key,<<"key">>},
{integer,0},
{key,<<"another key">>},
{integer,0},
end_object,
{integer,0},
end_array,end_json]}.
{json, "negative_zero.json"}.

View file

@ -1 +0,0 @@
null

View file

@ -1,3 +0,0 @@
{name, "null"}.
{jsx, [{literal, null},end_json]}.
{json, "null.json"}.

View file

@ -1 +0,0 @@
[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 42, 127, 99999999999999999999999999999, 1e1, 1E1, 1.0e1, 1.325e278, -1, -1e-1, 3.7e-78 ]

View file

@ -1,23 +0,0 @@
{name, "numbers"}.
{jsx, [start_array,
{integer,1},
{integer,2},
{integer,3},
{integer,4},
{integer,5},
{integer,6},
{integer,7},
{integer,8},
{integer,9},
{integer,42},
{integer,127},
{integer,99999999999999999999999999999},
{float,1.0e1},
{float,1.0e1},
{float,1.0e1},
{float,1.325e278},
{integer,-1},
{float,-1.0e-1},
{float,3.7e-78},
end_array,end_json]}.
{json, "numbers.json"}.

View file

@ -1 +0,0 @@
{"foo":"bar", "baz":true, "false":null,"object":{ "key" : "value" },"list":[null,null,null,[],"\n\r\\"]}

View file

@ -1,22 +0,0 @@
{name, "object"}.
{jsx, [start_object,
{key,<<"foo">>},
{string,<<"bar">>},
{key,<<"baz">>},
{literal,true},
{key,<<"false">>},
{literal,null},
{key,<<"object">>},
start_object,
{key,<<"key">>},
{string,<<"value">>},
end_object,
{key,<<"list">>},
start_array,
{literal,null},
{literal,null},
{literal,null},
start_array,end_array,
{string,<<"\n\r\\">>},
end_array,end_object,end_json]}.
{json, "object.json"}.

View file

@ -1 +0,0 @@
[ "this is a random string with \n embedded\u0020escapes in it" ]

View file

@ -1,5 +0,0 @@
{name, "string"}.
{jsx, [start_array,
{string,<<"this is a random string with \n embedded escapes in it">>},
end_array,end_json]}.
{json, "string.json"}.

View file

@ -1 +0,0 @@
["\"", "\\", "\b", "\f", "\n", "\r", "\t"]

View file

@ -1,11 +0,0 @@
{name, "string_escapes"}.
{jsx, [start_array,
{string,<<"\"">>},
{string,<<"\\">>},
{string,<<"\b">>},
{string,<<"\f">>},
{string,<<"\n">>},
{string,<<"\r">>},
{string,<<"\t">>},
end_array,end_json]}.
{json, "string_escapes.json"}.

View file

@ -1 +0,0 @@
true

View file

@ -1,3 +0,0 @@
{name, "true"}.
{jsx, [{literal, true},end_json]}.
{json, "true.json"}.

View file

@ -1 +0,0 @@
[[[[]]]

View file

@ -1,3 +0,0 @@
{name, "unbalanced array"}.
{jsx, {error, badarg}}.
{json, "unbalanced_array.json"}.

View file

@ -1 +0,0 @@
[ "arabic letter alef: ", "\u0627" ]

View file

@ -1,6 +0,0 @@
{name, "unicode_to_codepoint"}.
{jsx, [start_array,
{string,<<"arabic letter alef: ">>},
{string,<<1575/utf8>>},
end_array,end_json]}.
{json, "unicode_to_codepoint.json"}.

View file

@ -1 +0,0 @@
["\ud801blah"]

View file

@ -1,3 +0,0 @@
{name, "unpaired_surrogate"}.
{jsx, {error, badarg}}.
{json, "unpaired_surrogate.json"}.

View file

@ -1 +0,0 @@
["\ud801blah"]

View file

@ -1,4 +0,0 @@
{name, "unpaired surrogate replaced"}.
{jsx, [start_array,{string,<<65533/utf8,$b,$l,$a,$h>>},end_array,end_json]}.
{json, "unpaired_surrogate_replaced.json"}.
{jsx_flags, [replaced_bad_utf8]}.

View file

@ -1,4 +0,0 @@
[0.3]

View file

@ -1,3 +0,0 @@
{name, "whitespace"}.
{jsx, [start_array,{float,0.3},end_array,end_json]}.
{json, "whitespace.json"}.

View file

@ -1 +0,0 @@
[[0], 0, {"key":0, "another key":0}, 0 ]

View file

@ -1,14 +0,0 @@
{name, "zero"}.
{jsx, [start_array,start_array,
{integer,0},
end_array,
{integer,0},
start_object,
{key,<<"key">>},
{integer,0},
{key,<<"another key">>},
{integer,0},
end_object,
{integer,0},
end_array,end_json]}.
{json, "zero.json"}.

View file

@ -34,9 +34,8 @@
-export_type([json_term/0, json_text/0]).
%% test handler
-ifdef(TEST).
-export([init/1, handle_event/2]).
-include("jsx_tests.hrl").
-endif.
@ -53,24 +52,24 @@
-spec encode(Source::json_term()) -> json_text() | {incomplete, encoder()}.
-spec encode(Source::json_term(), Opts::jsx_to_json:opts()) -> json_text() | {incomplete, encoder()}.
-spec encode(Source::json_term(), Config::jsx_to_json:config()) -> json_text() | {incomplete, encoder()}.
encode(Source) -> encode(Source, []).
encode(Source, Opts) -> jsx_to_json:to_json(Source, Opts).
encode(Source, Config) -> jsx_to_json:to_json(Source, Config).
%% old api, alias for encode/x
to_json(Source) -> encode(Source, []).
to_json(Source, Opts) -> encode(Source, Opts).
to_json(Source, Config) -> encode(Source, Config).
term_to_json(Source) -> encode(Source, []).
term_to_json(Source, Opts) -> encode(Source, Opts).
term_to_json(Source, Config) -> encode(Source, Config).
-spec format(Source::json_text()) -> json_text() | {incomplete, decoder()}.
-spec format(Source::json_text(), Opts::jsx_to_json:opts()) -> json_text() | {incomplete, decoder()}.
-spec format(Source::json_text(), Config::jsx_to_json:config()) -> json_text() | {incomplete, decoder()}.
format(Source) -> format(Source, []).
format(Source, Opts) -> jsx_to_json:format(Source, Opts).
format(Source, Config) -> jsx_to_json:format(Source, Config).
-spec minify(Source::json_text()) -> json_text() | {incomplete, decoder()}.
@ -84,45 +83,45 @@ prettify(Source) -> format(Source, [space, {indent, 2}]).
-spec decode(Source::json_text()) -> json_term() | {incomplete, decoder()}.
-spec decode(Source::json_text(), Opts::jsx_to_term:opts()) -> json_term() | {incomplete, decoder()}.
-spec decode(Source::json_text(), Config::jsx_to_term:config()) -> json_term() | {incomplete, decoder()}.
decode(Source) -> decode(Source, []).
decode(Source, Opts) -> jsx_to_term:to_term(Source, Opts).
decode(Source, Config) -> jsx_to_term:to_term(Source, Config).
%% old api, alias for to_term/x
to_term(Source) -> decode(Source, []).
to_term(Source, Opts) -> decode(Source, Opts).
to_term(Source, Config) -> decode(Source, Config).
json_to_term(Source) -> decode(Source, []).
json_to_term(Source, Opts) -> decode(Source, Opts).
json_to_term(Source, Config) -> decode(Source, Config).
-spec is_json(Source::any()) -> true | false.
-spec is_json(Source::any(), Opts::jsx_verify:opts()) -> true | false.
-spec is_json(Source::any(), Config::jsx_verify:config()) -> true | false.
is_json(Source) -> is_json(Source, []).
is_json(Source, Opts) -> jsx_verify:is_json(Source, Opts).
is_json(Source, Config) -> jsx_verify:is_json(Source, Config).
-spec is_term(Source::any()) -> true | false.
-spec is_term(Source::any(), Opts::jsx_verify:opts()) -> true | false.
-spec is_term(Source::any(), Config::jsx_verify:config()) -> true | false.
is_term(Source) -> is_term(Source, []).
is_term(Source, Opts) -> jsx_verify:is_term(Source, Opts).
is_term(Source, Config) -> jsx_verify:is_term(Source, Config).
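%% a rough usage sketch of the API above under default options (results in the
%% comments are approximate; api_sketch/0 is illustrative, not part of the commit):
api_sketch() ->
    Term = jsx:decode(<<"{\"key\": true}">>),   %% [{<<"key">>, true}]
    JSON = jsx:encode(Term),                    %% roughly <<"{\"key\":true}">>
    {jsx:is_json(JSON), jsx:is_term(Term)}.     %% {true, true}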
-type decoder() :: fun((json_text() | end_stream) -> any()).
-spec decoder(Handler::module(), State::any(), Opts::list()) -> decoder().
-spec decoder(Handler::module(), State::any(), Config::list()) -> decoder().
decoder(Handler, State, Opts) -> jsx_decoder:decoder(Handler, State, Opts).
decoder(Handler, State, Config) -> jsx_decoder:decoder(Handler, State, Config).
-type encoder() :: fun((json_term() | end_stream) -> any()).
-spec encoder(Handler::module(), State::any(), Opts::list()) -> encoder().
-spec encoder(Handler::module(), State::any(), Config::list()) -> encoder().
encoder(Handler, State, Opts) -> jsx_encoder:encoder(Handler, State, Opts).
encoder(Handler, State, Config) -> jsx_encoder:encoder(Handler, State, Config).
-type token() :: [token()]
@ -149,242 +148,6 @@ encoder(Handler, State, Opts) -> jsx_encoder:encoder(Handler, State, Opts).
-type parser() :: fun((token() | end_stream) -> any()).
-spec parser(Handler::module(), State::any(), Opts::list()) -> parser().
-spec parser(Handler::module(), State::any(), Config::list()) -> parser().
parser(Handler, State, Opts) -> jsx_parser:parser(Handler, State, Opts).
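%% decoder/3, encoder/3 and parser/3 all take a callback module implementing
%% init/1 and handle_event/2; the test handler defined below is the simplest
%% example -- it accumulates events and reverses them on end_json. A hedged
%% sketch of driving the decoder directly with that handler, as the tests below
%% do (TEST build only; decoder_sketch/0 is illustrative):
decoder_sketch() ->
    Decode = jsx_decoder:decoder(jsx, [], []),
    %% a complete JSON text yields the full event list in one call
    [start_array, {literal, true}, end_array, end_json] = Decode(<<"[true]">>).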
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
jsx_decoder_test_() ->
jsx_decoder_gen(load_tests(code:lib_dir(jsx, priv) ++ "/test_cases/")).
encoder_decoder_equiv_test_() ->
[
{"encoder/decoder equivalency",
?_assert((jsx_decoder:decoder(?MODULE, [], []))(
<<"[\"a\", 17, 3.14, true, {\"k\":false}, []]">>
) =:= (jsx_encoder:encoder(?MODULE, [], []))([<<"a">>, 17, 3.14, true, [{<<"k">>, false}], []])
)
}
].
partial_numbers_test_() ->
[
{"partial integer",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"integer\": 12345">>),
F(<<"67890}">>)
end =:= [{<<"integer">>, 1234567890}]
)
},
{"partial integer",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"integer\": 1234567890">>),
F(<<"}">>)
end =:= [{<<"integer">>, 1234567890}]
)
},
{"partial float",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"float\": 1.">>),
F(<<"23}">>)
end =:= [{<<"float">>, 1.23}]
)
},
{"partial float",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"float\": 1.2">>),
F(<<"3}">>)
end =:= [{<<"float">>, 1.23}]
)
},
{"partial float",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"float\": 1.23">>),
F(<<"}">>)
end =:= [{<<"float">>, 1.23}]
)
},
{"partial exp",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"exp\": 1.0e">>),
F(<<"1}">>)
end =:= [{<<"exp">>, 10.0}]
)
},
{"partial exp",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"exp\": 1.0e1">>),
F(<<"2}">>)
end =:= [{<<"exp">>, 1.0e12}]
)
},
{"partial exp",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"exp\": 1.0e1">>),
F(<<"}">>)
end =:= [{<<"exp">>, 10.0}]
)
},
{"partial zero",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"zero\": 0">>),
F(<<".0}">>)
end =:= [{<<"zero">>, 0.0}]
)
},
{"partial zero",
?_assert(begin
{incomplete, F} = jsx:decode(<<"{\"zero\": 0">>),
F(<<"}">>)
end =:= [{<<"zero">>, 0}]
)
}
].
single_quoted_strings_test_() ->
[
{"single quoted keys",
?_assertEqual(
to_term(<<"{'key':true}">>, [single_quoted_strings]),
[{<<"key">>, true}]
)
},
{"multiple single quoted keys",
?_assertEqual(
to_term(<<"{'key':true, 'another key':true}">>, [single_quoted_strings]),
[{<<"key">>, true}, {<<"another key">>, true}]
)
},
{"nested single quoted keys",
?_assertEqual(
to_term(<<"{'key': {'key':true, 'another key':true}}">>, [single_quoted_strings]),
[{<<"key">>, [{<<"key">>, true}, {<<"another key">>, true}]}]
)
},
{"single quoted string",
?_assertEqual(
to_term(<<"['string']">>, [single_quoted_strings]),
[<<"string">>]
)
},
{"single quote in double quoted string",
?_assertEqual(
to_term(<<"[\"a single quote: '\"]">>, [single_quoted_strings]),
[<<"a single quote: '">>]
)
},
{"escaped single quote in single quoted string",
?_assertEqual(
to_term(<<"['a single quote: \\'']">>, [single_quoted_strings]),
[<<"a single quote: '">>]
)
},
{"escaped single quote when single quotes are disallowed",
?_assertError(
badarg,
to_term(<<"[\"a single quote: \\'\"]">>)
)
},
{"mismatched quotes",
?_assertError(
badarg,
to_term(<<"['mismatched\"]">>, [single_quoted_strings])
)
}
].
%% test handler
init([]) -> [].
handle_event(end_json, State) -> lists:reverse([end_json] ++ State);
handle_event(Event, State) -> [Event] ++ State.
jsx_decoder_gen([]) -> [];
jsx_decoder_gen([Test|Rest]) ->
Name = proplists:get_value(name, Test),
JSON = proplists:get_value(json, Test),
JSX = proplists:get_value(jsx, Test),
Flags = proplists:get_value(jsx_flags, Test, []),
{generator, fun() ->
[{Name, ?_assertEqual(test_decode(JSON, Flags), JSX)},
{Name ++ " (incremental)",
?_assertEqual(incremental_decode(JSON, Flags), JSX)
}
| jsx_decoder_gen(Rest)
]
end}.
load_tests(Path) ->
%% search the specified directory for any files with the .test ending
TestSpecs = filelib:wildcard("*.test", Path),
load_tests(TestSpecs, Path, []).
load_tests([], _Dir, Acc) ->
lists:reverse(Acc);
load_tests([Test|Rest], Dir, Acc) ->
case file:consult(Dir ++ "/" ++ Test) of
{ok, TestSpec} ->
ParsedTest = parse_tests(TestSpec, Dir),
load_tests(Rest, Dir, [ParsedTest] ++ Acc)
; {error, _Reason} ->
erlang:error(badarg, [[Test|Rest], Dir, Acc])
end.
parse_tests(TestSpec, Dir) ->
parse_tests(TestSpec, Dir, []).
parse_tests([{json, Path}|Rest], Dir, Acc) when is_list(Path) ->
case file:read_file(Dir ++ "/" ++ Path) of
{ok, Bin} -> parse_tests(Rest, Dir, [{json, Bin}] ++ Acc)
; _ -> erlang:error(badarg, [[{json, Path}|Rest], Dir, Acc])
end;
parse_tests([KV|Rest], Dir, Acc) ->
parse_tests(Rest, Dir, [KV] ++ Acc);
parse_tests([], _Dir, Acc) ->
Acc.
test_decode(JSON, Flags) ->
try
case (jsx_decoder:decoder(?MODULE, [], Flags))(JSON) of
{incomplete, More} ->
case More(<<" ">>) of
{incomplete, _} -> {error, badarg}
; Events -> Events
end
; Events -> Events
end
catch
error:badarg -> {error, badarg}
end.
incremental_decode(<<C:1/binary, Rest/binary>>, Flags) ->
P = jsx_decoder:decoder(?MODULE, [], Flags ++ [explicit_end]),
try incremental_decode_loop(P(C), Rest)
catch error:badarg -> {error, badarg}
end.
incremental_decode_loop({incomplete, More}, <<>>) ->
case More(end_stream) of
{incomplete, _} -> {error, badarg}
; X -> X
end;
incremental_decode_loop({incomplete, More}, <<C:1/binary, Rest/binary>>) ->
incremental_decode_loop(More(C), Rest).
-endif.
parser(Handler, State, Config) -> jsx_parser:parser(Handler, State, Config).

View file

@ -1,4 +1,4 @@
-record(opts, {
-record(config, {
replaced_bad_utf8 = false,
escaped_forward_slashes = false,
single_quoted_strings = false,
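%% the renamed #config{} record (truncated in this hunk) carries the boolean
%% options callers pass as a plain list; a hedged sketch based on the flags
%% exercised by the tests in this diff (config_flags_sketch/0 is illustrative):
config_flags_sketch() ->
    %% single quotes are rejected by default; the flag relaxes that
    %%   => [{<<"key">>, true}]
    A = jsx:decode(<<"{'key': true}">>, [single_quoted_strings]),
    %% an unpaired surrogate escape is replaced with U+FFFD instead of erroring
    %%   => [<<16#fffd/utf8, "blah">>]
    B = jsx:decode(<<"[\"\\ud801blah\"]">>, [replaced_bad_utf8]),
    {A, B}.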

File diff suppressed because it is too large

View file

@ -25,20 +25,20 @@
-export([encoder/3]).
-spec encoder(Handler::module(), State::any(), Opts::jsx:opts()) -> jsx:encoder().
-spec encoder(Handler::module(), State::any(), Config::jsx:config()) -> jsx:encoder().
encoder(Handler, State, Opts) ->
encoder(Handler, State, Config) ->
fun(JSON) ->
start(
JSON,
{Handler, Handler:init(State)},
jsx_utils:parse_opts(Opts)
jsx_utils:parse_config(Config)
)
end.
-include("jsx_opts.hrl").
-include("jsx_config.hrl").
-ifndef(error).
@ -48,86 +48,86 @@ encoder(Handler, State, Opts) ->
-endif.
start(Term, {Handler, State}, Opts) ->
Handler:handle_event(end_json, value(pre_encode(Term, Opts), {Handler, State}, Opts)).
start(Term, {Handler, State}, Config) ->
Handler:handle_event(end_json, value(pre_encode(Term, Config), {Handler, State}, Config)).
value(String, {Handler, State}, Opts) when is_binary(String) ->
Handler:handle_event({string, clean_string(String, Opts)}, State);
value(Float, {Handler, State}, _Opts) when is_float(Float) ->
value(String, {Handler, State}, Config) when is_binary(String) ->
Handler:handle_event({string, clean_string(String, Config)}, State);
value(Float, {Handler, State}, _Config) when is_float(Float) ->
Handler:handle_event({float, Float}, State);
value(Int, {Handler, State}, _Opts) when is_integer(Int) ->
value(Int, {Handler, State}, _Config) when is_integer(Int) ->
Handler:handle_event({integer, Int}, State);
value(Literal, {Handler, State}, _Opts)
value(Literal, {Handler, State}, _Config)
when Literal == true; Literal == false; Literal == null ->
Handler:handle_event({literal, Literal}, State);
value([{}], {Handler, State}, _Opts) ->
value([{}], {Handler, State}, _Config) ->
Handler:handle_event(end_object, Handler:handle_event(start_object, State));
value([], {Handler, State}, _Opts) ->
value([], {Handler, State}, _Config) ->
Handler:handle_event(end_array, Handler:handle_event(start_array, State));
value([Tuple|_] = List, Handler, Opts) when is_tuple(Tuple) ->
list_or_object(List, Handler, Opts);
value(List, Handler, Opts) when is_list(List) ->
list_or_object(List, Handler, Opts);
value(Term, Handler, Opts) -> ?error([Term, Handler, Opts]).
value([Tuple|_] = List, Handler, Config) when is_tuple(Tuple) ->
list_or_object(List, Handler, Config);
value(List, Handler, Config) when is_list(List) ->
list_or_object(List, Handler, Config);
value(Term, Handler, Config) -> ?error([Term, Handler, Config]).
list_or_object([Term|Rest], {Handler, State}, Opts) ->
case pre_encode(Term, Opts) of
list_or_object([Term|Rest], {Handler, State}, Config) ->
case pre_encode(Term, Config) of
{K, V} ->
object([{K, V}|Rest], {Handler, Handler:handle_event(start_object, State)}, Opts)
object([{K, V}|Rest], {Handler, Handler:handle_event(start_object, State)}, Config)
; T ->
list([T|Rest], {Handler, Handler:handle_event(start_array, State)}, Opts)
list([T|Rest], {Handler, Handler:handle_event(start_array, State)}, Config)
end.
object([{Key, Value}, Next|Rest], {Handler, State}, Opts) when is_atom(Key); is_binary(Key) ->
V = pre_encode(Value, Opts),
object([{Key, Value}, Next|Rest], {Handler, State}, Config) when is_atom(Key); is_binary(Key) ->
V = pre_encode(Value, Config),
object(
[pre_encode(Next, Opts)|Rest],
[pre_encode(Next, Config)|Rest],
{
Handler,
value(
V,
{Handler, Handler:handle_event({key, clean_string(fix_key(Key), Opts)}, State)},
Opts
{Handler, Handler:handle_event({key, clean_string(fix_key(Key), Config)}, State)},
Config
)
},
Opts
Config
);
object([{Key, Value}], {Handler, State}, Opts) when is_atom(Key); is_binary(Key) ->
object([{Key, Value}], {Handler, State}, Config) when is_atom(Key); is_binary(Key) ->
object(
[],
{
Handler,
value(
pre_encode(Value, Opts),
{Handler, Handler:handle_event({key, clean_string(fix_key(Key), Opts)}, State)},
Opts
pre_encode(Value, Config),
{Handler, Handler:handle_event({key, clean_string(fix_key(Key), Config)}, State)},
Config
)
},
Opts
Config
);
object([], {Handler, State}, _Opts) -> Handler:handle_event(end_object, State);
object(Term, Handler, Opts) -> ?error([Term, Handler, Opts]).
object([], {Handler, State}, _Config) -> Handler:handle_event(end_object, State);
object(Term, Handler, Config) -> ?error([Term, Handler, Config]).
list([Value, Next|Rest], {Handler, State}, Opts) ->
list([pre_encode(Next, Opts)|Rest], {Handler, value(Value, {Handler, State}, Opts)}, Opts);
list([Value], {Handler, State}, Opts) ->
list([], {Handler, value(Value, {Handler, State}, Opts)}, Opts);
list([], {Handler, State}, _Opts) -> Handler:handle_event(end_array, State);
list(Term, Handler, Opts) -> ?error([Term, Handler, Opts]).
list([Value, Next|Rest], {Handler, State}, Config) ->
list([pre_encode(Next, Config)|Rest], {Handler, value(Value, {Handler, State}, Config)}, Config);
list([Value], {Handler, State}, Config) ->
list([], {Handler, value(Value, {Handler, State}, Config)}, Config);
list([], {Handler, State}, _Config) -> Handler:handle_event(end_array, State);
list(Term, Handler, Config) -> ?error([Term, Handler, Config]).
pre_encode(Value, #opts{pre_encode=false}) -> Value;
pre_encode(Value, Opts) -> (Opts#opts.pre_encode)(Value).
pre_encode(Value, #config{pre_encode=false}) -> Value;
pre_encode(Value, Config) -> (Config#config.pre_encode)(Value).
fix_key(Key) when is_atom(Key) -> fix_key(atom_to_binary(Key, utf8));
fix_key(Key) when is_binary(Key) -> Key.
clean_string(Bin, Opts) -> jsx_utils:clean_string(Bin, Opts).
clean_string(Bin, Config) -> jsx_utils:clean_string(Bin, Config).
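%% value/3, list_or_object/3, object/3 and list/3 above walk a term depth first
%% and emit one event per token to the handler; a rough illustration using the
%% accumulating test handler in the jsx module (TEST build only), much as the
%% generated tests below do via start/3 (encoder_sketch/0 is illustrative):
encoder_sketch() ->
    Encode = jsx_encoder:encoder(jsx, [], []),
    [start_object, {key, <<"k">>}, start_array,
     {integer, 1}, {literal, true},
     end_array, end_object, end_json] = Encode([{<<"k">>, [1, true]}]).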
@ -135,87 +135,20 @@ clean_string(Bin, Opts) -> jsx_utils:clean_string(Bin, Opts).
-include_lib("eunit/include/eunit.hrl").
encode(Term) -> encode(Term, []).
encode(Term, Opts) ->
try (encoder(jsx, [], Opts))(Term)
catch _:_ -> {error, badarg}
end.
encode_test_() ->
Data = jsx:test_cases(),
[
{"naked string", ?_assertEqual(encode(<<"a string\n">>), [{string, <<"a string\n">>}, end_json])},
{"escaped naked string", ?_assertEqual(encode(<<"a string\n">>, [escaped_strings]), [{string, <<"a string\\n">>}, end_json])},
{"naked integer", ?_assertEqual(encode(123), [{integer, 123}, end_json])},
{"naked float", ?_assertEqual(encode(1.23), [{float, 1.23}, end_json])},
{"naked literal", ?_assertEqual(encode(null), [{literal, null}, end_json])},
{"empty object", ?_assertEqual(encode([{}]), [start_object, end_object, end_json])},
{"empty list", ?_assertEqual(encode([]), [start_array, end_array, end_json])},
{"simple list", ?_assertEqual(
encode([1,2,3,true,false]),
[
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
{literal, true},
{literal, false},
end_array,
end_json
]
{
Title, ?_assertEqual(
Events ++ [end_json],
start(Term, {jsx, []}, #config{})
)
},
{"simple object", ?_assertEqual(
encode([{<<"a">>, true}, {<<"b">>, false}]),
[
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
end_object,
end_json
]
)
},
{"complex term", ?_assertEqual(
encode([
{<<"a">>, true},
{<<"b">>, false},
{<<"c">>, [1,2,3]},
{<<"d">>, [{<<"key">>, <<"value">>}]}
]),
[
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
{key, <<"c">>},
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
end_array,
{key, <<"d">>},
start_object,
{key, <<"key">>},
{string, <<"value">>},
end_object,
end_object,
end_json
]
)
},
{"atom keys", ?_assertEqual(
encode([{key, <<"value">>}]),
[start_object, {key, <<"key">>}, {string, <<"value">>}, end_object, end_json]
)
}
} || {Title, _, Term, Events} <- Data
].
encode(Term, Config) -> start(Term, {jsx, []}, jsx_utils:parse_config(Config)).
pre_encoders_test_() ->
Term = [
{<<"object">>, [
@ -226,7 +159,6 @@ pre_encoders_test_() ->
],
[
{"no pre encode", ?_assertEqual(
encode(Term, []),
[
start_object,
{key, <<"object">>}, start_object,
@ -242,10 +174,10 @@ pre_encoders_test_() ->
end_object,
end_object,
end_json
]
],
encode(Term, [])
)},
{"replace lists with empty lists", ?_assertEqual(
encode(Term, [{pre_encode, fun(V) -> case V of [{_,_}|_] -> V; [{}] -> V; V when is_list(V) -> []; _ -> V end end}]),
[
start_object,
{key, <<"object">>}, start_object,
@ -255,18 +187,18 @@ pre_encoders_test_() ->
end_object,
end_object,
end_json
]
],
encode(Term, [{pre_encode, fun(V) -> case V of [{_,_}|_] -> V; [{}] -> V; V when is_list(V) -> []; _ -> V end end}])
)},
{"replace objects with empty objects", ?_assertEqual(
encode(Term, [{pre_encode, fun(V) -> case V of [{_,_}|_] -> [{}]; _ -> V end end}]),
[
start_object,
end_object,
end_json
]
],
encode(Term, [{pre_encode, fun(V) -> case V of [{_,_}|_] -> [{}]; _ -> V end end}])
)},
{"replace all non-list and non_tuple values with false", ?_assertEqual(
encode(Term, [{pre_encode, fun(V) when is_list(V); is_tuple(V) -> V; (_) -> false end}]),
[
start_object,
{key, <<"object">>}, start_object,
@ -282,10 +214,10 @@ pre_encoders_test_() ->
end_object,
end_object,
end_json
]
],
encode(Term, [{pre_encode, fun(V) when is_list(V); is_tuple(V) -> V; (_) -> false end}])
)},
{"replace all atoms with atom_to_list", ?_assertEqual(
encode(Term, [{pre_encode, fun(V) when is_atom(V) -> unicode:characters_to_binary(atom_to_list(V)); (V) -> V end}]),
[
start_object,
{key, <<"object">>}, start_object,
@ -301,206 +233,46 @@ pre_encoders_test_() ->
end_object,
end_object,
end_json
]
],
encode(Term, [{pre_encode, fun(V) when is_atom(V) -> unicode:characters_to_binary(atom_to_list(V)); (V) -> V end}])
)},
{"pre_encode tuple", ?_assertEqual(
encode({1, 2, 3}, [{pre_encode, fun(Tuple) when is_tuple(Tuple) -> tuple_to_list(Tuple); (V) -> V end}]),
[
start_array,
{integer, 1}, {integer, 2}, {integer, 3},
end_array,
end_json
]
],
encode({1, 2, 3}, [{pre_encode, fun(Tuple) when is_tuple(Tuple) -> tuple_to_list(Tuple); (V) -> V end}])
)},
{"pre_encode 2-tuples", ?_assertEqual(
encode([{two, 1}, {three, 2}], [{pre_encode, fun({K, V}) -> {K, V + 1}; (V) -> V end}]),
[
start_object,
{key, <<"two">>}, {integer, 2}, {key, <<"three">>}, {integer, 3},
end_object,
end_json
]
],
encode([{two, 1}, {three, 2}], [{pre_encode, fun({K, V}) -> {K, V + 1}; (V) -> V end}])
)},
{"pre_encode one field record", ?_assertEqual(
encode([{foo, bar}], [{pre_encode, fun({foo, V}) -> {V, undefined}; (undefined) -> false; (V) -> V end}]),
[
start_object,
{key, <<"bar">>}, {literal, false},
end_object,
end_json
]
],
encode([{foo, bar}], [{pre_encode, fun({foo, V}) -> {V, undefined}; (undefined) -> false; (V) -> V end}])
)},
{"pre_encode list", ?_assertEqual(
encode([1,2,3], [{pre_encode, fun(X) when is_integer(X) -> X + 1; (V) -> V end}]),
[
start_array,
{integer, 2}, {integer, 3}, {integer, 4},
end_array,
end_json
]
],
encode([1,2,3], [{pre_encode, fun(X) when is_integer(X) -> X + 1; (V) -> V end}])
)}
].
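%% {pre_encode, Fun} is applied to every value before it is encoded, which is
%% what lets the cases above rewrite tuples, lists and atoms on the fly. The
%% same option works at the public API level; a hedged sketch (output shown is
%% approximate; pre_encode_sketch/0 is illustrative):
pre_encode_sketch() ->
    Bump = fun({K, V}) -> {K, V + 1}; (V) -> V end,
    %% roughly <<"{\"two\":2,\"three\":3}">>
    jsx:encode([{two, 1}, {three, 2}], [{pre_encode, Bump}]).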
escapes_test_() ->
[
{"backspace escape", ?_assertEqual(encode(<<"\b">>, [escaped_strings]), [{string, <<"\\b">>}, end_json])},
{"formfeed escape", ?_assertEqual(encode(<<"\f">>, [escaped_strings]), [{string, <<"\\f">>}, end_json])},
{"newline escape", ?_assertEqual(encode(<<"\n">>, [escaped_strings]), [{string, <<"\\n">>}, end_json])},
{"carriage return escape", ?_assertEqual(encode(<<"\r">>, [escaped_strings]), [{string, <<"\\r">>}, end_json])},
{"tab escape", ?_assertEqual(encode(<<"\t">>, [escaped_strings]), [{string, <<"\\t">>}, end_json])},
{"quote escape", ?_assertEqual(encode(<<"\"">>, [escaped_strings]), [{string, <<"\\\"">>}, end_json])},
{"single quote escape", ?_assertEqual(encode(<<"'">>, [escaped_strings, single_quoted_strings]), [{string, <<"\\'">>}, end_json])},
{"no single quote escape", ?_assertEqual(encode(<<"'">>, [escaped_strings]), [{string, <<"'">>}, end_json])},
{"forward slash escape", ?_assertEqual(encode(<<"/">>, [escaped_strings, escaped_forward_slashes]), [{string, <<"\\/">>}, end_json])},
{"no forward slash escape", ?_assertEqual(encode(<<"/">>, [escaped_strings]), [{string, <<"/">>}, end_json])},
{"back slash escape", ?_assertEqual(encode(<<"\\">>, [escaped_strings]), [{string, <<"\\\\">>}, end_json])},
{"jsonp escape", ?_assertEqual(
encode(<<16#2028/utf8, 16#2029/utf8>>, [escaped_strings]),
[{string, <<"\\u2028\\u2029">>}, end_json]
)},
{"no jsonp escape", ?_assertEqual(
encode(<<16#2028/utf8, 16#2029/utf8>>, [escaped_strings, unescaped_jsonp]),
[{string, <<16#2028/utf8, 16#2029/utf8>>}, end_json]
)},
{"control escape", ?_assertEqual(encode(<<0>>, [escaped_strings]), [{string, <<"\\u0000">>}, end_json])},
{"dirty strings", ?_assertEqual(encode(<<"\n">>, [escaped_strings, dirty_strings]), [{string, <<"\n">>}, end_json])},
{"ignore bad escapes", ?_assertEqual(encode(<<"\\x25">>, [escaped_strings, ignored_bad_escapes]), [{string, <<"\\\\x25">>}, end_json])}
].
surrogates_test_() ->
[
{"surrogates - badarg",
?_assert(check_bad(surrogates()))
},
{"surrogates - replaced",
?_assert(check_replaced(surrogates()))
}
].
good_characters_test_() ->
[
{"acceptable codepoints",
?_assert(check_good(good()))
},
{"acceptable codepoints - escaped_strings",
?_assert(check_good(good(), [escaped_strings]))
},
{"acceptable codepoints - replaced_bad_utf8",
?_assert(check_good(good(), [replaced_bad_utf8]))
},
{"acceptable codepoints - escaped_strings + replaced_bad_utf8",
?_assert(check_good(good(), [escaped_strings, replaced_bad_utf8]))
},
{"acceptable extended",
?_assert(check_good(good_extended()))
},
{"acceptable extended - escaped_strings",
?_assert(check_good(good_extended(), [escaped_strings]))
},
{"acceptable extended - escaped_strings",
?_assert(check_good(good_extended(), [replaced_bad_utf8]))
}
].
reserved_test_() ->
[
{"reserved noncharacters - badarg",
?_assert(check_bad(reserved_space()))
},
{"reserved noncharacters - replaced",
?_assert(check_replaced(reserved_space()))
}
].
noncharacters_test_() ->
[
{"noncharacters - badarg",
?_assert(check_bad(noncharacters()))
},
{"noncharacters - replaced",
?_assert(check_replaced(noncharacters()))
}
].
extended_noncharacters_test_() ->
[
{"extended noncharacters - badarg",
?_assert(check_bad(extended_noncharacters()))
},
{"extended noncharacters - replaced",
?_assert(check_replaced(extended_noncharacters()))
}
].
check_bad(List) ->
[] == lists:dropwhile(fun({_, {error, badarg}}) -> true ; (_) -> false end,
check(List, [], [])
).
check_replaced(List) ->
[] == lists:dropwhile(fun({_, [{string, <<16#fffd/utf8>>}|_]}) -> true ; (_) -> false
end,
check(List, [replaced_bad_utf8], [])
).
check_good(List) -> check_good(List, []).
check_good(List, Opts) ->
[] == lists:dropwhile(fun({_, [{string, _}|_]}) -> true ; (_) -> false end,
check(List, Opts, [])
).
check([], _Opts, Acc) -> Acc;
check([H|T], Opts, Acc) ->
R = encode(to_fake_utf(H, utf8), Opts),
check(T, Opts, [{H, R}] ++ Acc).
noncharacters() -> lists:seq(16#fffe, 16#ffff).
extended_noncharacters() ->
[16#1fffe, 16#1ffff, 16#2fffe, 16#2ffff]
++ [16#3fffe, 16#3ffff, 16#4fffe, 16#4ffff]
++ [16#5fffe, 16#5ffff, 16#6fffe, 16#6ffff]
++ [16#7fffe, 16#7ffff, 16#8fffe, 16#8ffff]
++ [16#9fffe, 16#9ffff, 16#afffe, 16#affff]
++ [16#bfffe, 16#bffff, 16#cfffe, 16#cffff]
++ [16#dfffe, 16#dffff, 16#efffe, 16#effff]
++ [16#ffffe, 16#fffff, 16#10fffe, 16#10ffff].
surrogates() -> lists:seq(16#d800, 16#dfff).
reserved_space() -> lists:seq(16#fdd0, 16#fdef).
good() -> lists:seq(16#0000, 16#d7ff) ++ lists:seq(16#e000, 16#fdcf) ++ lists:seq(16#fdf0, 16#fffd).
good_extended() -> [16#10000, 16#20000, 16#30000, 16#40000, 16#50000,
16#60000, 16#70000, 16#80000, 16#90000, 16#a0000,
16#b0000, 16#c0000, 16#d0000, 16#e0000, 16#f0000
] ++ lists:seq(16#100000, 16#10fffd).
%% erlang refuses to encode certain codepoints, so fake them all
to_fake_utf(N, utf8) when N < 16#0080 -> <<N:8>>;
to_fake_utf(N, utf8) when N < 16#0800 ->
<<0:5, Y:5, X:6>> = <<N:16>>,
<<2#110:3, Y:5, 2#10:2, X:6>>;
to_fake_utf(N, utf8) when N < 16#10000 ->
<<Z:4, Y:6, X:6>> = <<N:16>>,
<<2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6>>;
to_fake_utf(N, utf8) ->
<<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>,
<<2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6>>.
-endif.

View file

@ -26,13 +26,13 @@
-export([parser/3]).
-spec parser(Handler::module(), State::any(), Opts::jsx:opts()) -> jsx:parser().
-spec parser(Handler::module(), State::any(), Config::jsx:config()) -> jsx:parser().
parser(Handler, State, Opts) ->
fun(Tokens) -> value(Tokens, {Handler, Handler:init(State)}, [], jsx_utils:parse_opts(Opts)) end.
parser(Handler, State, Config) ->
fun(Tokens) -> value(Tokens, {Handler, Handler:init(State)}, [], jsx_utils:parse_config(Config)) end.
-include("jsx_opts.hrl").
-include("jsx_config.hrl").
%% error, incomplete and event macros
@ -44,124 +44,124 @@ parser(Handler, State, Opts) ->
-ifndef(incomplete).
-define(incomplete(State, Handler, Stack, Opts),
-define(incomplete(State, Handler, Stack, Config),
{incomplete, fun(end_stream) ->
case State([end_json],
Handler,
Stack,
Opts) of
{incomplete, _} -> ?error([Handler, Stack, Opts])
Config) of
{incomplete, _} -> ?error([Handler, Stack, Config])
; Events -> Events
end
; (Tokens) ->
State(Tokens, Handler, Stack, Opts)
State(Tokens, Handler, Stack, Config)
end
}
).
-endif.
handle_event([], Handler, _Opts) -> Handler;
handle_event([Event|Rest], Handler, Opts) -> handle_event(Rest, handle_event(Event, Handler, Opts), Opts);
handle_event(Event, {Handler, State}, _Opts) -> {Handler, Handler:handle_event(Event, State)}.
handle_event([], Handler, _Config) -> Handler;
handle_event([Event|Rest], Handler, Config) -> handle_event(Rest, handle_event(Event, Handler, Config), Config);
handle_event(Event, {Handler, State}, _Config) -> {Handler, Handler:handle_event(Event, State)}.
value([start_object|Tokens], Handler, Stack, Opts) ->
object(Tokens, handle_event(start_object, Handler, Opts), [object|Stack], Opts);
value([start_array|Tokens], Handler, Stack, Opts) ->
array(Tokens, handle_event(start_array, Handler, Opts), [array|Stack], Opts);
value([{literal, true}|Tokens], Handler, [], Opts) ->
done(Tokens, handle_event({literal, true}, Handler, Opts), [], Opts);
value([{literal, false}|Tokens], Handler, [], Opts) ->
done(Tokens, handle_event({literal, false}, Handler, Opts), [], Opts);
value([{literal, null}|Tokens], Handler, [], Opts) ->
done(Tokens, handle_event({literal, null}, Handler, Opts), [], Opts);
value([{literal, true}|Tokens], Handler, Stack, Opts) ->
maybe_done(Tokens, handle_event({literal, true}, Handler, Opts), Stack, Opts);
value([{literal, false}|Tokens], Handler, Stack, Opts) ->
maybe_done(Tokens, handle_event({literal, false}, Handler, Opts), Stack, Opts);
value([{literal, null}|Tokens], Handler, Stack, Opts) ->
maybe_done(Tokens, handle_event({literal, null}, Handler, Opts), Stack, Opts);
value([Literal|Tokens], Handler, Stack, Opts) when Literal == true; Literal == false; Literal == null ->
value([{literal, Literal}] ++ Tokens, Handler, Stack, Opts);
value([{integer, Number}|Tokens], Handler, [], Opts) when is_integer(Number) ->
done(Tokens, handle_event({integer, Number}, Handler, Opts), [], Opts);
value([{float, Number}|Tokens], Handler, [], Opts) when is_float(Number) ->
done(Tokens, handle_event({float, Number}, Handler, Opts), [], Opts);
value([{integer, Number}|Tokens], Handler, Stack, Opts) when is_integer(Number) ->
maybe_done(Tokens, handle_event({integer, Number}, Handler, Opts), Stack, Opts);
value([{float, Number}|Tokens], Handler, Stack, Opts) when is_float(Number) ->
maybe_done(Tokens, handle_event({float, Number}, Handler, Opts), Stack, Opts);
value([{number, Number}|Tokens], Handler, Stack, Opts) when is_integer(Number) ->
value([{integer, Number}] ++ Tokens, Handler, Stack, Opts);
value([{number, Number}|Tokens], Handler, Stack, Opts) when is_float(Number) ->
value([{float, Number}] ++ Tokens, Handler, Stack, Opts);
value([Number|Tokens], Handler, Stack, Opts) when is_integer(Number) ->
value([{integer, Number}] ++ Tokens, Handler, Stack, Opts);
value([Number|Tokens], Handler, Stack, Opts) when is_float(Number) ->
value([{float, Number}] ++ Tokens, Handler, Stack, Opts);
value([{string, String}|Tokens], Handler, [], Opts) when is_binary(String) ->
done(Tokens, handle_event({string, clean_string(String, Opts)}, Handler, Opts), [], Opts);
value([{string, String}|Tokens], Handler, Stack, Opts) when is_binary(String) ->
maybe_done(Tokens, handle_event({string, clean_string(String, Opts)}, Handler, Opts), Stack, Opts);
value([String|Tokens], Handler, Stack, Opts) when is_binary(String) ->
value([{string, String}] ++ Tokens, Handler, Stack, Opts);
value([], Handler, Stack, Opts) ->
?incomplete(value, Handler, Stack, Opts);
value(BadTokens, Handler, Stack, Opts) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Opts]);
value(Token, Handler, Stack, Opts) ->
value([Token], Handler, Stack, Opts).
value([start_object|Tokens], Handler, Stack, Config) ->
object(Tokens, handle_event(start_object, Handler, Config), [object|Stack], Config);
value([start_array|Tokens], Handler, Stack, Config) ->
array(Tokens, handle_event(start_array, Handler, Config), [array|Stack], Config);
value([{literal, true}|Tokens], Handler, [], Config) ->
done(Tokens, handle_event({literal, true}, Handler, Config), [], Config);
value([{literal, false}|Tokens], Handler, [], Config) ->
done(Tokens, handle_event({literal, false}, Handler, Config), [], Config);
value([{literal, null}|Tokens], Handler, [], Config) ->
done(Tokens, handle_event({literal, null}, Handler, Config), [], Config);
value([{literal, true}|Tokens], Handler, Stack, Config) ->
maybe_done(Tokens, handle_event({literal, true}, Handler, Config), Stack, Config);
value([{literal, false}|Tokens], Handler, Stack, Config) ->
maybe_done(Tokens, handle_event({literal, false}, Handler, Config), Stack, Config);
value([{literal, null}|Tokens], Handler, Stack, Config) ->
maybe_done(Tokens, handle_event({literal, null}, Handler, Config), Stack, Config);
value([Literal|Tokens], Handler, Stack, Config) when Literal == true; Literal == false; Literal == null ->
value([{literal, Literal}] ++ Tokens, Handler, Stack, Config);
value([{integer, Number}|Tokens], Handler, [], Config) when is_integer(Number) ->
done(Tokens, handle_event({integer, Number}, Handler, Config), [], Config);
value([{float, Number}|Tokens], Handler, [], Config) when is_float(Number) ->
done(Tokens, handle_event({float, Number}, Handler, Config), [], Config);
value([{integer, Number}|Tokens], Handler, Stack, Config) when is_integer(Number) ->
maybe_done(Tokens, handle_event({integer, Number}, Handler, Config), Stack, Config);
value([{float, Number}|Tokens], Handler, Stack, Config) when is_float(Number) ->
maybe_done(Tokens, handle_event({float, Number}, Handler, Config), Stack, Config);
value([{number, Number}|Tokens], Handler, Stack, Config) when is_integer(Number) ->
value([{integer, Number}] ++ Tokens, Handler, Stack, Config);
value([{number, Number}|Tokens], Handler, Stack, Config) when is_float(Number) ->
value([{float, Number}] ++ Tokens, Handler, Stack, Config);
value([Number|Tokens], Handler, Stack, Config) when is_integer(Number) ->
value([{integer, Number}] ++ Tokens, Handler, Stack, Config);
value([Number|Tokens], Handler, Stack, Config) when is_float(Number) ->
value([{float, Number}] ++ Tokens, Handler, Stack, Config);
value([{string, String}|Tokens], Handler, [], Config) when is_binary(String) ->
done(Tokens, handle_event({string, clean_string(String, Config)}, Handler, Config), [], Config);
value([{string, String}|Tokens], Handler, Stack, Config) when is_binary(String) ->
maybe_done(Tokens, handle_event({string, clean_string(String, Config)}, Handler, Config), Stack, Config);
value([String|Tokens], Handler, Stack, Config) when is_binary(String) ->
value([{string, String}] ++ Tokens, Handler, Stack, Config);
value([], Handler, Stack, Config) ->
?incomplete(value, Handler, Stack, Config);
value(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Config]);
value(Token, Handler, Stack, Config) ->
value([Token], Handler, Stack, Config).
object([end_object|Tokens], Handler, [object|Stack], Opts) ->
maybe_done(Tokens, handle_event(end_object, Handler, Opts), Stack, Opts);
object([{key, Key}|Tokens], Handler, Stack, Opts) when is_atom(Key); is_binary(Key) ->
value(Tokens, handle_event({key, clean_string(fix_key(Key), Opts)}, Handler, Opts), Stack, Opts);
object([Key|Tokens], Handler, Stack, Opts) when is_atom(Key); is_binary(Key) ->
value(Tokens, handle_event({key, clean_string(fix_key(Key), Opts)}, Handler, Opts), Stack, Opts);
object([], Handler, Stack, Opts) ->
?incomplete(object, Handler, Stack, Opts);
object(BadTokens, Handler, Stack, Opts) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Opts]);
object(Token, Handler, Stack, Opts) ->
object([Token], Handler, Stack, Opts).
object([end_object|Tokens], Handler, [object|Stack], Config) ->
maybe_done(Tokens, handle_event(end_object, Handler, Config), Stack, Config);
object([{key, Key}|Tokens], Handler, Stack, Config) when is_atom(Key); is_binary(Key) ->
value(Tokens, handle_event({key, clean_string(fix_key(Key), Config)}, Handler, Config), Stack, Config);
object([Key|Tokens], Handler, Stack, Config) when is_atom(Key); is_binary(Key) ->
value(Tokens, handle_event({key, clean_string(fix_key(Key), Config)}, Handler, Config), Stack, Config);
object([], Handler, Stack, Config) ->
?incomplete(object, Handler, Stack, Config);
object(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Config]);
object(Token, Handler, Stack, Config) ->
object([Token], Handler, Stack, Config).
array([end_array|Tokens], Handler, [array|Stack], Opts) ->
maybe_done(Tokens, handle_event(end_array, Handler, Opts), Stack, Opts);
array([], Handler, Stack, Opts) ->
?incomplete(array, Handler, Stack, Opts);
array(Tokens, Handler, Stack, Opts) when is_list(Tokens) ->
value(Tokens, Handler, Stack, Opts);
array(Token, Handler, Stack, Opts) ->
array([Token], Handler, Stack, Opts).
array([end_array|Tokens], Handler, [array|Stack], Config) ->
maybe_done(Tokens, handle_event(end_array, Handler, Config), Stack, Config);
array([], Handler, Stack, Config) ->
?incomplete(array, Handler, Stack, Config);
array(Tokens, Handler, Stack, Config) when is_list(Tokens) ->
value(Tokens, Handler, Stack, Config);
array(Token, Handler, Stack, Config) ->
array([Token], Handler, Stack, Config).
maybe_done([end_json], Handler, [], Opts) ->
done([], Handler, [], Opts);
maybe_done(Tokens, Handler, [object|_] = Stack, Opts) when is_list(Tokens) ->
object(Tokens, Handler, Stack, Opts);
maybe_done(Tokens, Handler, [array|_] = Stack, Opts) when is_list(Tokens) ->
array(Tokens, Handler, Stack, Opts);
maybe_done([], Handler, Stack, Opts) ->
?incomplete(maybe_done, Handler, Stack, Opts);
maybe_done(BadTokens, Handler, Stack, Opts) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Opts]);
maybe_done(Token, Handler, Stack, Opts) ->
maybe_done([Token], Handler, Stack, Opts).
maybe_done([end_json], Handler, [], Config) ->
done([], Handler, [], Config);
maybe_done(Tokens, Handler, [object|_] = Stack, Config) when is_list(Tokens) ->
object(Tokens, Handler, Stack, Config);
maybe_done(Tokens, Handler, [array|_] = Stack, Config) when is_list(Tokens) ->
array(Tokens, Handler, Stack, Config);
maybe_done([], Handler, Stack, Config) ->
?incomplete(maybe_done, Handler, Stack, Config);
maybe_done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Config]);
maybe_done(Token, Handler, Stack, Config) ->
maybe_done([Token], Handler, Stack, Config).
done(Tokens, Handler, [], Opts) when Tokens == [end_json]; Tokens == [] ->
{_, State} = handle_event(end_json, Handler, Opts),
done(Tokens, Handler, [], Config) when Tokens == [end_json]; Tokens == [] ->
{_, State} = handle_event(end_json, Handler, Config),
State;
done(BadTokens, Handler, Stack, Opts) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Opts]);
done(Token, Handler, Stack, Opts) ->
done([Token], Handler, Stack, Opts).
done(BadTokens, Handler, Stack, Config) when is_list(BadTokens) ->
?error([BadTokens, Handler, Stack, Config]);
done(Token, Handler, Stack, Config) ->
done([Token], Handler, Stack, Config).
fix_key(Key) when is_atom(Key) -> fix_key(atom_to_binary(Key, utf8));
fix_key(Key) when is_binary(Key) -> Key.
clean_string(Bin, Opts) -> jsx_utils:clean_string(Bin, Opts).
clean_string(Bin, Config) -> jsx_utils:clean_string(Bin, Config).
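%% besides canonical {integer, N}, {float, F}, {literal, L} and {string, S}
%% tokens, the value/4 clauses above accept bare numbers, binaries, literals and
%% {number, N} and normalise them first -- the 'simple rep' / 'alt rep' /
%% 'full rep' cases below exercise exactly that. A small sketch using the
%% accumulating test handler (TEST build only; parser_sketch/0 is illustrative):
parser_sketch() ->
    Parse = parser(jsx, [], []),
    [start_array, {integer, 1}, {float, 2.0}, {literal, true},
     {string, <<"hi">>}, end_array, end_json] =
        Parse([start_array, 1, 2.0, true, <<"hi">>, end_array, end_json]).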
@ -169,339 +169,16 @@ clean_string(Bin, Opts) -> jsx_utils:clean_string(Bin, Opts).
-include_lib("eunit/include/eunit.hrl").
incomplete_test_() ->
F = parser(jsx, [], []),
decode_test_() ->
Data = jsx:test_cases(),
[
{"incomplete test", ?_assertEqual(
begin
{incomplete, A} = F(start_object),
{incomplete, B} = A(key),
{incomplete, C} = B(true),
{incomplete, D} = C(end_object),
D(end_json)
end,
[start_object, {key, <<"key">>}, {literal, true}, end_object, end_json]
)}
].
encode(Term) -> encode(Term, []).
encode(Term, Opts) ->
try (parser(jsx, [], Opts))(Term)
catch error:badarg -> {error, badarg}
end.
encode_test_() ->
[
{"naked string", ?_assertEqual(
encode([{string, <<"a string\n">>}, end_json]), [{string, <<"a string\n">>}, end_json]
)},
{"naked integer - simple rep", ?_assertEqual(
encode([123, end_json]), [{integer, 123}, end_json]
)},
{"naked integer - alt rep", ?_assertEqual(
encode([{number, 123}, end_json]), [{integer, 123}, end_json]
)},
{"naked integer - full rep", ?_assertEqual(
encode([{integer, 123}, end_json]), [{integer, 123}, end_json]
)},
{"naked float - simple rep", ?_assertEqual(
encode([1.23, end_json]), [{float, 1.23}, end_json]
)},
{"naked float - alt rep", ?_assertEqual(
encode([{number, 1.23}, end_json]), [{float, 1.23}, end_json]
)},
{"naked float - full rep", ?_assertEqual(
encode([{float, 1.23}, end_json]), [{float, 1.23}, end_json]
)},
{"naked literal - simple rep", ?_assertEqual(
encode([null, end_json]), [{literal, null}, end_json]
)},
{"naked literal - full rep", ?_assertEqual(
encode([{literal, null}, end_json]), [{literal, null}, end_json]
)},
{"empty object", ?_assertEqual(
encode([start_object, end_object, end_json]), [start_object, end_object, end_json]
)},
{"empty list", ?_assertEqual(
encode([start_array, end_array, end_json]), [start_array, end_array, end_json]
)},
{"simple list", ?_assertEqual(
encode([
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
{literal, true},
{literal, false},
end_array,
end_json
]),
[
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
{literal, true},
{literal, false},
end_array,
end_json
]
{
Title, ?_assertEqual(
Events ++ [end_json],
value(Events ++ [end_json], {jsx, []}, [], #config{})
)
},
{"simple object", ?_assertEqual(
encode([
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
end_object,
end_json
]),
[
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
end_object,
end_json
]
)
},
{"complex term", ?_assertEqual(
encode([
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
{key, <<"c">>},
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
end_array,
{key, <<"d">>},
start_object,
{key, <<"key">>},
{string, <<"value">>},
end_object,
end_object,
end_json
]),
[
start_object,
{key, <<"a">>},
{literal, true},
{key, <<"b">>},
{literal, false},
{key, <<"c">>},
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
end_array,
{key, <<"d">>},
start_object,
{key, <<"key">>},
{string, <<"value">>},
end_object,
end_object,
end_json
]
)
},
{"atom keys", ?_assertEqual(
encode([start_object, {key, key}, {string, <<"value">>}, end_object, end_json]),
[start_object, {key, <<"key">>}, {string, <<"value">>}, end_object, end_json]
)}
} || {Title, _, _, Events} <- Data
].
encode_failures_test_() ->
[
{"unwrapped values", ?_assertEqual(
{error, badarg},
encode([{string, <<"a string\n">>}, {string, <<"a string\n">>}, end_json])
)},
{"unbalanced array", ?_assertEqual(
{error, badarg},
encode([start_array, end_array, end_array, end_json])
)},
{"premature finish", ?_assertEqual(
{error, badarg},
encode([start_object, {key, <<"key">>, start_array, end_json}])
)},
{"really premature finish", ?_assertEqual(
{error, badarg},
encode([end_json])
)}
].
escapes_test_() ->
[
{"backspace escape", ?_assertEqual(encode(<<"\b">>, [escaped_strings]), [{string, <<"\\b">>}, end_json])},
{"formfeed escape", ?_assertEqual(encode(<<"\f">>, [escaped_strings]), [{string, <<"\\f">>}, end_json])},
{"newline escape", ?_assertEqual(encode(<<"\n">>, [escaped_strings]), [{string, <<"\\n">>}, end_json])},
{"carriage return escape", ?_assertEqual(encode(<<"\r">>, [escaped_strings]), [{string, <<"\\r">>}, end_json])},
{"tab escape", ?_assertEqual(encode(<<"\t">>, [escaped_strings]), [{string, <<"\\t">>}, end_json])},
{"quote escape", ?_assertEqual(encode(<<"\"">>, [escaped_strings]), [{string, <<"\\\"">>}, end_json])},
{"single quote escape", ?_assertEqual(encode(<<"'">>, [escaped_strings, single_quoted_strings]), [{string, <<"\\'">>}, end_json])},
{"no single quote escape", ?_assertEqual(encode(<<"'">>, [escaped_strings]), [{string, <<"'">>}, end_json])},
{"forward slash escape", ?_assertEqual(encode(<<"/">>, [escaped_strings, escaped_forward_slashes]), [{string, <<"\\/">>}, end_json])},
{"no forward slash escape", ?_assertEqual(encode(<<"/">>, [escaped_strings]), [{string, <<"/">>}, end_json])},
{"back slash escape", ?_assertEqual(encode(<<"\\">>, [escaped_strings]), [{string, <<"\\\\">>}, end_json])},
{"jsonp escape", ?_assertEqual(
encode(<<16#2028/utf8, 16#2029/utf8>>, [escaped_strings]),
[{string, <<"\\u2028\\u2029">>}, end_json]
)},
{"no jsonp escape", ?_assertEqual(
encode(<<16#2028/utf8, 16#2029/utf8>>, [escaped_strings, unescaped_jsonp]),
[{string, <<16#2028/utf8, 16#2029/utf8>>}, end_json]
)},
{"control escape", ?_assertEqual(encode(<<0>>, [escaped_strings]), [{string, <<"\\u0000">>}, end_json])},
{"dirty strings", ?_assertEqual(encode(<<"\n">>, [escaped_strings, dirty_strings]), [{string, <<"\n">>}, end_json])},
{"ignore bad escapes", ?_assertEqual(encode(<<"\\x25">>, [escaped_strings, ignored_bad_escapes]), [{string, <<"\\\\x25">>}, end_json])}
].
surrogates_test_() ->
[
{"surrogates - badarg",
?_assert(check_bad(surrogates()))
},
{"surrogates - replaced",
?_assert(check_replaced(surrogates()))
}
].
good_characters_test_() ->
[
{"acceptable codepoints",
?_assert(check_good(good()))
},
{"acceptable codepoints - escaped_strings",
?_assert(check_good(good(), [escaped_strings]))
},
{"acceptable codepoints - replaced_bad_utf8",
?_assert(check_good(good(), [replaced_bad_utf8]))
},
{"acceptable codepoints - escaped_strings + replaced_bad_utf8",
?_assert(check_good(good(), [escaped_strings, replaced_bad_utf8]))
},
{"acceptable extended",
?_assert(check_good(good_extended()))
},
{"acceptable extended - escaped_strings",
?_assert(check_good(good_extended(), [escaped_strings]))
},
{"acceptable extended - escaped_strings",
?_assert(check_good(good_extended(), [replaced_bad_utf8]))
}
].
reserved_test_() ->
[
{"reserved noncharacters - badarg",
?_assert(check_bad(reserved_space()))
},
{"reserved noncharacters - replaced",
?_assert(check_replaced(reserved_space()))
}
].
noncharacters_test_() ->
[
{"noncharacters - badarg",
?_assert(check_bad(noncharacters()))
},
{"noncharacters - replaced",
?_assert(check_replaced(noncharacters()))
}
].
extended_noncharacters_test_() ->
[
{"extended noncharacters - badarg",
?_assert(check_bad(extended_noncharacters()))
},
{"extended noncharacters - replaced",
?_assert(check_replaced(extended_noncharacters()))
}
].
check_bad(List) ->
[] == lists:dropwhile(fun({_, {error, badarg}}) -> true ; (_) -> false end,
check(List, [], [])
).
check_replaced(List) ->
[] == lists:dropwhile(fun({_, [{string, <<16#fffd/utf8>>}|_]}) -> true ; (_) -> false
end,
check(List, [replaced_bad_utf8], [])
).
check_good(List) -> check_good(List, []).
check_good(List, Opts) ->
[] == lists:dropwhile(fun({_, [{string, _}|_]}) -> true ; (_) -> false end,
check(List, Opts, [])
).
check([], _Opts, Acc) -> Acc;
check([H|T], Opts, Acc) ->
R = encode(to_fake_utf(H, utf8), Opts),
check(T, Opts, [{H, R}] ++ Acc).
noncharacters() -> lists:seq(16#fffe, 16#ffff).
extended_noncharacters() ->
[16#1fffe, 16#1ffff, 16#2fffe, 16#2ffff]
++ [16#3fffe, 16#3ffff, 16#4fffe, 16#4ffff]
++ [16#5fffe, 16#5ffff, 16#6fffe, 16#6ffff]
++ [16#7fffe, 16#7ffff, 16#8fffe, 16#8ffff]
++ [16#9fffe, 16#9ffff, 16#afffe, 16#affff]
++ [16#bfffe, 16#bffff, 16#cfffe, 16#cffff]
++ [16#dfffe, 16#dffff, 16#efffe, 16#effff]
++ [16#ffffe, 16#fffff, 16#10fffe, 16#10ffff].
surrogates() -> lists:seq(16#d800, 16#dfff).
reserved_space() -> lists:seq(16#fdd0, 16#fdef).
good() -> lists:seq(16#0000, 16#d7ff) ++ lists:seq(16#e000, 16#fdcf) ++ lists:seq(16#fdf0, 16#fffd).
good_extended() -> [16#10000, 16#20000, 16#30000, 16#40000, 16#50000,
16#60000, 16#70000, 16#80000, 16#90000, 16#a0000,
16#b0000, 16#c0000, 16#d0000, 16#e0000, 16#f0000
] ++ lists:seq(16#100000, 16#10fffd).
%% erlang refuses to encode certain codepoints, so fake them all
to_fake_utf(N, utf8) when N < 16#0080 -> <<N:8>>;
to_fake_utf(N, utf8) when N < 16#0800 ->
<<0:5, Y:5, X:6>> = <<N:16>>,
<<2#110:3, Y:5, 2#10:2, X:6>>;
to_fake_utf(N, utf8) when N < 16#10000 ->
<<Z:4, Y:6, X:6>> = <<N:16>>,
<<2#1110:4, Z:4, 2#10:2, Y:6, 2#10:2, X:6>>;
to_fake_utf(N, utf8) ->
<<0:3, W:3, Z:6, Y:6, X:6>> = <<N:24>>,
<<2#11110:5, W:3, 2#10:2, Z:6, 2#10:2, Y:6, 2#10:2, X:6>>.
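%% illustrative example (not in the original source): to_fake_utf/2 packs the
%% raw codepoint into the UTF-8 bit layout without validity checks, e.g.
%%   to_fake_utf(16#d800, utf8) =:= <<16#ed, 16#a0, 16#80>>
%% the byte pattern for a lone surrogate that <<16#d800/utf8>> itself would
%% refuse to construct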
-endif.

src/jsx_tests.hrl Normal file
View file

@ -0,0 +1,211 @@
%% data and helper functions for tests
-export([init/1, handle_event/2]).
-export([test_cases/0]).
-include_lib("eunit/include/eunit.hrl").
%% test handler
init([]) -> [].
handle_event(end_json, State) -> lists:reverse([end_json] ++ State);
handle_event(Event, State) -> [Event] ++ State.
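%% illustrative example (not in the original source): folding this handler over
%% a terminated event stream returns the events in order, e.g.
%%   lists:foldl(fun handle_event/2, init([]), [start_array, end_array, end_json])
%%   =:= [start_array, end_array, end_json]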
test_cases() ->
empty_array()
++ nested_array()
++ empty_object()
++ nested_object()
++ strings()
++ literals()
++ integers()
++ floats()
++ compound_object().
empty_array() -> [{"[]", <<"[]">>, [], [start_array, end_array]}].
nested_array() ->
[{
"[[[]]]",
<<"[[[]]]">>,
[[[]]],
[start_array, start_array, start_array, end_array, end_array, end_array]
}].
empty_object() -> [{"{}", <<"{}">>, [{}], [start_object, end_object]}].
nested_object() ->
[{
"{\"key\":{\"key\":{}}}",
<<"{\"key\":{\"key\":{}}}">>,
[{<<"key">>, [{<<"key">>, [{}]}]}],
[
start_object,
{key, <<"key">>},
start_object,
{key, <<"key">>},
start_object,
end_object,
end_object,
end_object
]
}].
naked_strings() ->
Raw = [
"",
"hello world"
],
[
{
String,
<<"\"", (list_to_binary(String))/binary, "\"">>,
list_to_binary(String),
[{string, list_to_binary(String)}]
}
|| String <- Raw
].
strings() ->
naked_strings()
++ [ wrap_with_array(Test) || Test <- naked_strings() ]
++ [ wrap_with_object(Test) || Test <- naked_strings() ].
naked_integers() ->
Raw = [
1, 2, 3,
127, 128, 129,
255, 256, 257,
65534, 65535, 65536,
18446744073709551616,
18446744073709551617
],
[
{
integer_to_list(X),
list_to_binary(integer_to_list(X)),
X,
[{integer, X}]
}
|| X <- Raw ++ [ -1 * Y || Y <- Raw ] ++ [0]
].
integers() ->
naked_integers()
++ [ wrap_with_array(Test) || Test <- naked_integers() ]
++ [ wrap_with_object(Test) || Test <- naked_integers() ].
naked_floats() ->
Raw = [
0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9,
1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9,
1234567890.0987654321,
0.0e0,
1234567890.0987654321e16,
0.1e0, 0.1e1, 0.1e2, 0.1e4, 0.1e8, 0.1e16, 0.1e308,
1.0e0, 1.0e1, 1.0e2, 1.0e4, 1.0e8, 1.0e16, 1.0e308,
2.2250738585072014e-308, %% min normalized float
1.7976931348623157e308, %% max normalized float
5.0e-324, %% min denormalized float
2.225073858507201e-308 %% max denormalized float
],
[
{
sane_float_to_list(X),
list_to_binary(sane_float_to_list(X)),
X,
[{float, X}]
}
|| X <- Raw ++ [ -1 * Y || Y <- Raw ]
].
floats() ->
naked_floats()
++ [ wrap_with_array(Test) || Test <- naked_floats() ]
++ [ wrap_with_object(Test) || Test <- naked_floats() ].
naked_literals() ->
[
{
atom_to_list(Literal),
atom_to_binary(Literal, unicode),
Literal,
[{literal, Literal}]
}
|| Literal <- [true, false, null]
].
literals() ->
naked_literals()
++ [ wrap_with_array(Test) || Test <- naked_literals() ]
++ [ wrap_with_object(Test) || Test <- naked_literals() ].
compound_object() ->
[{
"[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]",
<<"[{\"alpha\":[1,2,3],\"beta\":{\"alpha\":[1.0,2.0,3.0],\"beta\":[true,false]}},[{}]]">>,
[[{<<"alpha">>, [1, 2, 3]}, {<<"beta">>, [{<<"alpha">>, [1.0, 2.0, 3.0]}, {<<"beta">>, [true, false]}]}], [[{}]]],
[
start_array,
start_object,
{key, <<"alpha">>},
start_array,
{integer, 1},
{integer, 2},
{integer, 3},
end_array,
{key, <<"beta">>},
start_object,
{key, <<"alpha">>},
start_array,
{float, 1.0},
{float, 2.0},
{float, 3.0},
end_array,
{key, <<"beta">>},
start_array,
{literal, true},
{literal, false},
end_array,
end_object,
end_object,
start_array,
start_object,
end_object,
end_array,
end_array
]
}].
wrap_with_array({Title, JSON, Term, Events}) ->
{
"[" ++ Title ++ "]",
<<"[", JSON/binary, "]">>,
[Term],
[start_array] ++ Events ++ [end_array]
}.
wrap_with_object({Title, JSON, Term, Events}) ->
{
"{\"key\":" ++ Title ++ "}",
<<"{\"key\":", JSON/binary, "}">>,
[{<<"key">>, Term}],
[start_object, {key, <<"key">>}] ++ Events ++ [end_object]
}.
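%% illustrative example (not in the original source):
%%   wrap_with_array({"1", <<"1">>, 1, [{integer, 1}]}) evaluates to
%%   {"[1]", <<"[1]">>, [1], [start_array, {integer, 1}, end_array]}
%% and wrap_with_object/1 produces the matching {"key": ...} forms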
sane_float_to_list(X) ->
[Output] = io_lib:format("~p", [X]),
Output.

View file

@ -27,49 +27,49 @@
-export([init/1, handle_event/2]).
-record(opts, {
-record(config, {
space = 0,
indent = 0,
depth = 0
}).
-type opts() :: list().
-type config() :: list().
-spec to_json(Source::any(), Opts::opts()) -> binary().
-spec to_json(Source::any(), Config::config()) -> binary().
to_json(Source, Opts) when is_list(Opts) ->
(jsx:encoder(?MODULE, Opts, jsx_utils:extract_opts(Opts ++ [escaped_strings])))(Source).
to_json(Source, Config) when is_list(Config) ->
(jsx:encoder(?MODULE, Config, jsx_utils:extract_config(Config ++ [escaped_strings])))(Source).
-spec format(Source::binary(), Opts::opts()) -> binary().
-spec format(Source::binary(), Config::config()) -> binary().
format(Source, Opts) when is_binary(Source) andalso is_list(Opts) ->
(jsx:decoder(?MODULE, Opts, jsx_utils:extract_opts(Opts ++ [escaped_strings])))(Source).
format(Source, Config) when is_binary(Source) andalso is_list(Config) ->
(jsx:decoder(?MODULE, Config, jsx_utils:extract_config(Config ++ [escaped_strings])))(Source).
parse_opts(Opts) -> parse_opts(Opts, #opts{}).
parse_config(Config) -> parse_config(Config, #config{}).
parse_opts([{space, Val}|Rest], Opts) when is_integer(Val), Val > 0 ->
parse_opts(Rest, Opts#opts{space = Val});
parse_opts([space|Rest], Opts) ->
parse_opts(Rest, Opts#opts{space = 1});
parse_opts([{indent, Val}|Rest], Opts) when is_integer(Val), Val > 0 ->
parse_opts(Rest, Opts#opts{indent = Val});
parse_opts([indent|Rest], Opts) ->
parse_opts(Rest, Opts#opts{indent = 1});
parse_opts([{K, _}|Rest] = Options, Opts) ->
parse_config([{space, Val}|Rest], Config) when is_integer(Val), Val > 0 ->
parse_config(Rest, Config#config{space = Val});
parse_config([space|Rest], Config) ->
parse_config(Rest, Config#config{space = 1});
parse_config([{indent, Val}|Rest], Config) when is_integer(Val), Val > 0 ->
parse_config(Rest, Config#config{indent = Val});
parse_config([indent|Rest], Config) ->
parse_config(Rest, Config#config{indent = 1});
parse_config([{K, _}|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([K|Rest] = Options, Opts) ->
parse_config([K|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([], Opts) ->
Opts.
parse_config([], Config) ->
Config.
@ -85,97 +85,97 @@ parse_opts([], Opts) ->
init(Opts) -> {start, [], parse_opts(Opts)}.
init(Config) -> {start, [], parse_config(Config)}.
handle_event(Event, {start, Acc, Opts}) ->
handle_event(Event, {start, Acc, Config}) ->
case Event of
{Type, Value} -> {[], [Acc, encode(Type, Value, Opts)], Opts}
; start_object -> {[object_start], [Acc, ?start_object], Opts}
; start_array -> {[array_start], [Acc, ?start_array], Opts}
{Type, Value} -> {[], [Acc, encode(Type, Value, Config)], Config}
; start_object -> {[object_start], [Acc, ?start_object], Config}
; start_array -> {[array_start], [Acc, ?start_array], Config}
end;
handle_event(Event, {[object_start|Stack], Acc, OldOpts = #opts{depth = Depth}}) ->
Opts = OldOpts#opts{depth = Depth + 1},
handle_event(Event, {[object_start|Stack], Acc, OldConfig = #config{depth = Depth}}) ->
Config = OldConfig#config{depth = Depth + 1},
case Event of
{key, Key} ->
{[object_value|Stack], [Acc, indent(Opts), encode(string, Key, Opts), ?colon, space(Opts)], Opts}
{[object_value|Stack], [Acc, indent(Config), encode(string, Key, Config), ?colon, space(Config)], Config}
; end_object ->
{Stack, [Acc, ?end_object], OldOpts}
{Stack, [Acc, ?end_object], OldConfig}
end;
handle_event(Event, {[object_value|Stack], Acc, Opts}) ->
handle_event(Event, {[object_value|Stack], Acc, Config}) ->
case Event of
{Type, Value} when Type == string; Type == literal;
Type == integer; Type == float ->
{[key|Stack], [Acc, encode(Type, Value, Opts)], Opts}
; start_object -> {[object_start, key|Stack], [Acc, ?start_object], Opts}
; start_array -> {[array_start, key|Stack], [Acc, ?start_array], Opts}
{[key|Stack], [Acc, encode(Type, Value, Config)], Config}
; start_object -> {[object_start, key|Stack], [Acc, ?start_object], Config}
; start_array -> {[array_start, key|Stack], [Acc, ?start_array], Config}
end;
handle_event(Event, {[key|Stack], Acc, Opts = #opts{depth = Depth}}) ->
handle_event(Event, {[key|Stack], Acc, Config = #config{depth = Depth}}) ->
case Event of
{key, Key} ->
{[object_value|Stack], [Acc, ?comma, indent_or_space(Opts), encode(string, Key, Opts), ?colon, space(Opts)], Opts}
{[object_value|Stack], [Acc, ?comma, indent_or_space(Config), encode(string, Key, Config), ?colon, space(Config)], Config}
; end_object ->
NewOpts = Opts#opts{depth = Depth - 1},
{Stack, [Acc, indent(NewOpts), ?end_object], NewOpts}
NewConfig = Config#config{depth = Depth - 1},
{Stack, [Acc, indent(NewConfig), ?end_object], NewConfig}
end;
handle_event(Event, {[array_start|Stack], Acc, OldOpts = #opts{depth = Depth}}) ->
Opts = OldOpts#opts{depth = Depth + 1},
handle_event(Event, {[array_start|Stack], Acc, OldConfig = #config{depth = Depth}}) ->
Config = OldConfig#config{depth = Depth + 1},
case Event of
{Type, Value} when Type == string; Type == literal;
Type == integer; Type == float ->
{[array|Stack], [Acc, indent(Opts), encode(Type, Value, Opts)], Opts}
; start_object -> {[object_start, array|Stack], [Acc, indent(Opts), ?start_object], Opts}
; start_array -> {[array_start, array|Stack], [Acc, indent(Opts), ?start_array], Opts}
; end_array -> {Stack, [Acc, ?end_array], OldOpts}
{[array|Stack], [Acc, indent(Config), encode(Type, Value, Config)], Config}
; start_object -> {[object_start, array|Stack], [Acc, indent(Config), ?start_object], Config}
; start_array -> {[array_start, array|Stack], [Acc, indent(Config), ?start_array], Config}
; end_array -> {Stack, [Acc, ?end_array], OldConfig}
end;
handle_event(Event, {[array|Stack], Acc, Opts = #opts{depth = Depth}}) ->
handle_event(Event, {[array|Stack], Acc, Config = #config{depth = Depth}}) ->
case Event of
{Type, Value} when Type == string; Type == literal;
Type == integer; Type == float ->
{[array|Stack], [Acc, ?comma, indent_or_space(Opts), encode(Type, Value, Opts)], Opts}
{[array|Stack], [Acc, ?comma, indent_or_space(Config), encode(Type, Value, Config)], Config}
; end_array ->
NewOpts = Opts#opts{depth = Depth - 1},
{Stack, [Acc, indent(NewOpts), ?end_array], NewOpts}
; start_object -> {[object_start, array|Stack], [Acc, ?comma, indent_or_space(Opts), ?start_object], Opts}
; start_array -> {[array_start, array|Stack], [Acc, ?comma, indent_or_space(Opts), ?start_array], Opts}
NewConfig = Config#config{depth = Depth - 1},
{Stack, [Acc, indent(NewConfig), ?end_array], NewConfig}
; start_object -> {[object_start, array|Stack], [Acc, ?comma, indent_or_space(Config), ?start_object], Config}
; start_array -> {[array_start, array|Stack], [Acc, ?comma, indent_or_space(Config), ?start_array], Config}
end;
handle_event(end_json, {[], Acc, _Opts}) -> unicode:characters_to_binary(Acc, utf8).
handle_event(end_json, {[], Acc, _Config}) -> unicode:characters_to_binary(Acc, utf8).
encode(string, String, _Opts) ->
encode(string, String, _Config) ->
[?quote, String, ?quote];
encode(literal, Literal, _Opts) ->
encode(literal, Literal, _Config) ->
erlang:atom_to_list(Literal);
encode(integer, Integer, _Opts) ->
encode(integer, Integer, _Config) ->
erlang:integer_to_list(Integer);
encode(float, Float, _Opts) ->
encode(float, Float, _Config) ->
[Output] = io_lib:format("~p", [Float]), Output.
space(Opts) ->
case Opts#opts.space of
space(Config) ->
case Config#config.space of
0 -> []
; X when X > 0 -> binary:copy(?space, X)
end.
indent(Opts) ->
case Opts#opts.indent of
indent(Config) ->
case Config#config.indent of
0 -> []
; X when X > 0 ->
Indent = binary:copy(?space, X),
indent(Indent, Opts#opts.depth, [?newline])
indent(Indent, Config#config.depth, [?newline])
end.
indent(_Indent, 0, Acc) -> Acc;
indent(Indent, N, Acc) -> indent(Indent, N - 1, [Acc, Indent]).
indent_or_space(Opts) ->
case Opts#opts.indent > 0 of
true -> indent(Opts)
; false -> space(Opts)
indent_or_space(Config) ->
case Config#config.indent > 0 of
true -> indent(Config)
; false -> space(Config)
end.
@ -184,138 +184,122 @@ indent_or_space(Opts) ->
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
basic_format_test_() ->
config_test_() ->
[
{"empty object", ?_assertEqual(format(<<"{}">>, []), <<"{}">>)},
{"empty array", ?_assertEqual(format(<<"[]">>, []), <<"[]">>)},
{"naked integer", ?_assertEqual(format(<<"123">>, []), <<"123">>)},
{"naked float", ?_assertEqual(format(<<"1.23">>, []), <<"1.23">>)},
{"naked string", ?_assertEqual(format(<<"\"hi\"">>, []), <<"\"hi\"">>)},
{"naked string with control character", ?_assertEqual(
format(<<"\"hi\\n\"">>, []), <<"\"hi\\n\"">>
{"empty config", ?_assertEqual(#config{}, parse_config([]))},
{"unspecified indent/space", ?_assertEqual(
#config{space=1, indent=1},
parse_config([space, indent])
)},
{"naked literal", ?_assertEqual(format(<<"true">>, []), <<"true">>)},
{"simple object", ?_assertEqual(
format(<<" { \"key\" :\n\t \"value\"\r\r\r\n } ">>, []),
<<"{\"key\":\"value\"}">>
{"specific indent", ?_assertEqual(
#config{indent=4},
parse_config([{indent, 4}])
)},
{"really simple object", ?_assertEqual(format(<<"{\"k\":\"v\"}">>, []) , <<"{\"k\":\"v\"}">>)},
{"nested object", ?_assertEqual(
format(<<"{\"k\":{\"k\":\"v\"}, \"j\":{}}">>, []),
<<"{\"k\":{\"k\":\"v\"},\"j\":{}}">>
{"specific space", ?_assertEqual(
#config{space=2},
parse_config([{space, 2}])
)},
{"simple array", ?_assertEqual(
format(<<" [\n\ttrue,\n\tfalse , \n \tnull\n] ">>, []),
<<"[true,false,null]">>
{"specific space and indent", ?_assertEqual(
#config{space=2, indent=2},
parse_config([{space, 2}, {indent, 2}])
)},
{"really simple array", ?_assertEqual(format(<<"[1]">>, []), <<"[1]">>)},
{"nested array", ?_assertEqual(format(<<"[[[]]]">>, []), <<"[[[]]]">>)},
{"nested structures", ?_assertEqual(
format(<<"[
{
\"key\":\"value\",
\"another key\": \"another value\",
\"a list\": [true, false]
},
[[{}]]
]">>, []),
<<"[{\"key\":\"value\",\"another key\":\"another value\",\"a list\":[true,false]},[[{}]]]">>
{"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
{"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
].
space_test_() ->
[
{"no space", ?_assertEqual([], space(#config{space=0}))},
{"one space", ?_assertEqual(<<" ">>, space(#config{space=1}))},
{"four spaces", ?_assertEqual(<<" ">>, space(#config{space=4}))}
].
indent_test_() ->
[
{"no indent", ?_assertEqual([], indent(#config{indent=0, depth=1}))},
{"indent 1 depth 1", ?_assertEqual(
[[?newline], ?space],
indent(#config{indent=1, depth=1})
)},
{"simple nested structure",
?_assertEqual(
format(<<"[[],{\"k\":[[],{}],\"j\":{}},[]]">>, []),
<<"[[],{\"k\":[[],{}],\"j\":{}},[]]">>
{"indent 1 depth 2", ?_assertEqual(
[[[?newline], ?space], ?space],
indent(#config{indent=1, depth=2})
)},
{"indent 4 depth 1", ?_assertEqual(
[[?newline], <<" ">>],
indent(#config{indent=4, depth=1})
)},
{"indent 4 depth 2", ?_assertEqual(
[[[?newline], <<" ">>], <<" ">>],
indent(#config{indent=4, depth=2})
)}
].
indent_or_space_test_() ->
[
{"no indent so space", ?_assertEqual(
<<" ">>,
indent_or_space(#config{space=1, indent=0, depth=1})
)},
{"indent so no space", ?_assertEqual(
[[?newline], ?space],
indent_or_space(#config{space=1, indent=1, depth=1})
)}
].
format_test_() ->
[
{"0.0", ?_assert(encode(float, 0.0, #config{}) =:= "0.0")},
{"1.0", ?_assert(encode(float, 1.0, #config{}) =:= "1.0")},
{"-1.0", ?_assert(encode(float, -1.0, #config{}) =:= "-1.0")},
{"3.1234567890987654321",
?_assert(
encode(float, 3.1234567890987654321, #config{}) =:= "3.1234567890987655")
},
{"1.0e23", ?_assert(encode(float, 1.0e23, #config{}) =:= "1.0e23")},
{"0.3", ?_assert(encode(float, 3.0/10.0, #config{}) =:= "0.3")},
{"0.0001", ?_assert(encode(float, 0.0001, #config{}) =:= "0.0001")},
{"0.00001", ?_assert(encode(float, 0.00001, #config{}) =:= "1.0e-5")},
{"0.00000001", ?_assert(encode(float, 0.00000001, #config{}) =:= "1.0e-8")},
{"1.0e-323", ?_assert(encode(float, 1.0e-323, #config{}) =:= "1.0e-323")},
{"1.0e308", ?_assert(encode(float, 1.0e308, #config{}) =:= "1.0e308")},
{"min normalized float",
?_assert(
encode(float, math:pow(2, -1022), #config{}) =:= "2.2250738585072014e-308"
)
},
{"max normalized float",
?_assert(
encode(float, (2 - math:pow(2, -52)) * math:pow(2, 1023), #config{})
=:= "1.7976931348623157e308"
)
},
{"min denormalized float",
?_assert(encode(float, math:pow(2, -1074), #config{}) =:= "5.0e-324")
},
{"max denormalized float",
?_assert(
encode(float, (1 - math:pow(2, -52)) * math:pow(2, -1022), #config{})
=:= "2.225073858507201e-308"
)
}
].
basic_to_json_test_() ->
[
{"empty object", ?_assertEqual(to_json([{}], []), <<"{}">>)},
{"empty array", ?_assertEqual(to_json([], []), <<"[]">>)},
{"naked integer", ?_assertEqual(to_json(123, []), <<"123">>)},
{"naked float", ?_assertEqual(to_json(1.23, []) , <<"1.23">>)},
{"naked string", ?_assertEqual(to_json(<<"hi">>, []), <<"\"hi\"">>)},
{"naked string with control character", ?_assertEqual(
to_json(<<"hi\n">>, []), <<"\"hi\\n\"">>
)},
{"naked literal", ?_assertEqual(to_json(true, []), <<"true">>)},
{"simple object", ?_assertEqual(
to_json(
[{<<"key">>, <<"value">>}],
[]
),
<<"{\"key\":\"value\"}">>
)},
{"nested object", ?_assertEqual(
to_json(
[{<<"k">>,[{<<"k">>,<<"v">>}]},{<<"j">>,[{}]}],
[]
),
<<"{\"k\":{\"k\":\"v\"},\"j\":{}}">>
)},
{"simple array", ?_assertEqual(to_json([true, false, null], []), <<"[true,false,null]">>)},
{"really simple array", ?_assertEqual(to_json([1], []), <<"[1]">>)},
{"nested array", ?_assertEqual(to_json([[[]]], []), <<"[[[]]]">>)},
{"nested structures", ?_assertEqual(
to_json(
[
[
{<<"key">>, <<"value">>},
{<<"another key">>, <<"another value">>},
{<<"a list">>, [true, false]}
],
[[[{}]]]
],
[]
),
<<"[{\"key\":\"value\",\"another key\":\"another value\",\"a list\":[true,false]},[[{}]]]">>
)},
{"simple nested structure", ?_assertEqual(
to_json(
[[], [{<<"k">>, [[], [{}]]}, {<<"j">>, [{}]}], []],
[]
),
<<"[[],{\"k\":[[],{}],\"j\":{}},[]]">>
)}
].
opts_test_() ->
handle_event_test_() ->
Data = jsx:test_cases(),
[
{"unspecified indent/space", ?_assertEqual(
format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, [space, indent]),
<<"[\n true,\n false,\n null\n]">>
)},
{"specific indent/space", ?_assertEqual(
format(
<<"\n{\n\"key\" : [],\n\"another key\" : true\n}\n">>,
[{space, 2}, {indent, 3}]
),
<<"{\n \"key\": [],\n \"another key\": true\n}">>
)},
{"nested structures", ?_assertEqual(
format(
<<"[{\"key\":\"value\", \"another key\": \"another value\"}, [[true, false, null]]]">>,
[{space, 2}, {indent, 2}]
),
<<"[\n {\n \"key\": \"value\",\n \"another key\": \"another value\"\n },\n [\n [\n true,\n false,\n null\n ]\n ]\n]">>
)},
{"array spaces", ?_assertEqual(
format(<<"[1,2,3]">>, [{space, 2}]),
<<"[1, 2, 3]">>
)},
{"object spaces", ?_assertEqual(
format(<<"{\"a\":true,\"b\":true,\"c\":true}">>, [{space, 2}]),
<<"{\"a\": true, \"b\": true, \"c\": true}">>
)},
{"array indent", ?_assertEqual(
format(<<"[1.23, 1.23, 1.23]">>, [{indent, 2}]),
<<"[\n 1.23,\n 1.23,\n 1.23\n]">>
)},
{"object indent", ?_assertEqual(
format(<<"{\"a\":true,\"b\":true,\"c\":true}">>, [{indent, 2}]),
<<"{\n \"a\":true,\n \"b\":true,\n \"c\":true\n}">>
)}
{
Title, ?_assertEqual(
JSON,
lists:foldl(fun handle_event/2, {start, [], #config{}}, Events ++ [end_json])
)
} || {Title, JSON, _, Events} <- Data
].

View file

@ -27,12 +27,12 @@
-export([init/1, handle_event/2]).
-record(opts, {
-record(config, {
labels = binary,
post_decode = false
}).
-type opts() :: list().
-type config() :: list().
-type json_value() :: list({binary(), json_value()})
| list(json_value())
@ -44,74 +44,74 @@
| binary().
-spec to_term(Source::binary(), Opts::opts()) -> json_value().
-spec to_term(Source::binary(), Config::config()) -> json_value().
to_term(Source, Opts) when is_list(Opts) ->
(jsx:decoder(?MODULE, Opts, jsx_utils:extract_opts(Opts)))(Source).
to_term(Source, Config) when is_list(Config) ->
(jsx:decoder(?MODULE, Config, jsx_utils:extract_config(Config)))(Source).
parse_opts(Opts) -> parse_opts(Opts, #opts{}).
parse_config(Config) -> parse_config(Config, #config{}).
parse_opts([{labels, Val}|Rest], Opts)
parse_config([{labels, Val}|Rest], Config)
when Val == binary; Val == atom; Val == existing_atom ->
parse_opts(Rest, Opts#opts{labels = Val});
parse_opts([labels|Rest], Opts) ->
parse_opts(Rest, Opts#opts{labels = binary});
parse_opts([{post_decode, F}|Rest], Opts=#opts{post_decode=false}) when is_function(F, 1) ->
parse_opts(Rest, Opts#opts{post_decode=F});
parse_opts([{K, _}|Rest] = Options, Opts) ->
parse_config(Rest, Config#config{labels = Val});
parse_config([labels|Rest], Config) ->
parse_config(Rest, Config#config{labels = binary});
parse_config([{post_decode, F}|Rest], Config=#config{post_decode=false}) when is_function(F, 1) ->
parse_config(Rest, Config#config{post_decode=F});
parse_config([{K, _}|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([K|Rest] = Options, Opts) ->
parse_config([K|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([], Opts) ->
Opts.
parse_config([], Config) ->
Config.
init(Opts) -> {[[]], parse_opts(Opts)}.
init(Config) -> {[[]], parse_config(Config)}.
handle_event(end_json, {[[Terms]], _Opts}) -> Terms;
handle_event(end_json, {[[Terms]], _Config}) -> Terms;
handle_event(start_object, {Terms, Opts}) -> {[[]|Terms], Opts};
handle_event(end_object, {[[], {key, Key}, Last|Terms], Opts}) ->
{[[{Key, post_decode([{}], Opts)}] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[Object, {key, Key}, Last|Terms], Opts}) ->
{[[{Key, post_decode(lists:reverse(Object), Opts)}] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[[], Last|Terms], Opts}) ->
{[[post_decode([{}], Opts)] ++ Last] ++ Terms, Opts};
handle_event(end_object, {[Object, Last|Terms], Opts}) ->
{[[post_decode(lists:reverse(Object), Opts)] ++ Last] ++ Terms, Opts};
handle_event(start_object, {Terms, Config}) -> {[[]|Terms], Config};
handle_event(end_object, {[[], {key, Key}, Last|Terms], Config}) ->
{[[{Key, post_decode([{}], Config)}] ++ Last] ++ Terms, Config};
handle_event(end_object, {[Object, {key, Key}, Last|Terms], Config}) ->
{[[{Key, post_decode(lists:reverse(Object), Config)}] ++ Last] ++ Terms, Config};
handle_event(end_object, {[[], Last|Terms], Config}) ->
{[[post_decode([{}], Config)] ++ Last] ++ Terms, Config};
handle_event(end_object, {[Object, Last|Terms], Config}) ->
{[[post_decode(lists:reverse(Object), Config)] ++ Last] ++ Terms, Config};
handle_event(start_array, {Terms, Opts}) -> {[[]|Terms], Opts};
handle_event(end_array, {[List, {key, Key}, Last|Terms], Opts}) ->
{[[{Key, post_decode(lists:reverse(List), Opts)}] ++ Last] ++ Terms, Opts};
handle_event(end_array, {[Current, Last|Terms], Opts}) ->
{[[post_decode(lists:reverse(Current), Opts)] ++ Last] ++ Terms, Opts};
handle_event(start_array, {Terms, Config}) -> {[[]|Terms], Config};
handle_event(end_array, {[List, {key, Key}, Last|Terms], Config}) ->
{[[{Key, post_decode(lists:reverse(List), Config)}] ++ Last] ++ Terms, Config};
handle_event(end_array, {[List, Last|Terms], Config}) ->
{[[post_decode(lists:reverse(List), Config)] ++ Last] ++ Terms, Config};
handle_event({key, Key}, {Terms, Opts}) -> {[{key, format_key(Key, Opts)}] ++ Terms, Opts};
handle_event({key, Key}, {Terms, Config}) -> {[{key, format_key(Key, Config)}] ++ Terms, Config};
handle_event({_, Event}, {[{key, Key}, Last|Terms], Opts}) ->
{[[{Key, post_decode(Event, Opts)}] ++ Last] ++ Terms, Opts};
handle_event({_, Event}, {[Last|Terms], Opts}) ->
{[[post_decode(Event, Opts)] ++ Last] ++ Terms, Opts}.
handle_event({_, Event}, {[{key, Key}, Last|Terms], Config}) ->
{[[{Key, post_decode(Event, Config)}] ++ Last] ++ Terms, Config};
handle_event({_, Event}, {[Last|Terms], Config}) ->
{[[post_decode(Event, Config)] ++ Last] ++ Terms, Config}.
format_key(Key, Opts) ->
case Opts#opts.labels of
format_key(Key, Config) ->
case Config#config.labels of
binary -> Key
; atom -> binary_to_atom(Key, utf8)
; existing_atom -> binary_to_existing_atom(Key, utf8)
end.
post_decode(Value, #opts{post_decode=false}) -> Value;
post_decode(Value, Opts) -> (Opts#opts.post_decode)(Value).
post_decode(Value, #config{post_decode=false}) -> Value;
post_decode(Value, Config) -> (Config#config.post_decode)(Value).
%% eunit tests
@ -119,118 +119,163 @@ post_decode(Value, Opts) -> (Opts#opts.post_decode)(Value).
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
basic_test_() ->
config_test_() ->
%% for post_decode tests
F = fun(X) -> X end,
G = fun(X, Y) -> {X, Y} end,
[
{"empty object", ?_assertEqual(to_term(<<"{}">>, []), [{}])},
{"simple object", ?_assertEqual(to_term(<<"{\"key\": true}">>, []), [{<<"key">>, true}])},
{"less simple object", ?_assertEqual(
to_term(<<"{\"a\": 1, \"b\": 2}">>, []),
[{<<"a">>, 1}, {<<"b">>, 2}]
{"empty config", ?_assertEqual(#config{}, parse_config([]))},
{"implicit binary labels", ?_assertEqual(#config{}, parse_config([labels]))},
{"binary labels", ?_assertEqual(#config{}, parse_config([{labels, binary}]))},
{"atom labels", ?_assertEqual(#config{labels=atom}, parse_config([{labels, atom}]))},
{"existing atom labels", ?_assertEqual(
#config{labels=existing_atom},
parse_config([{labels, existing_atom}])
)},
{"nested object", ?_assertEqual(
to_term(<<"{\"key\": {\"key\": true}}">>, []),
[{<<"key">>, [{<<"key">>, true}]}]
{"post decode", ?_assertEqual(
#config{post_decode=F},
parse_config([{post_decode, F}])
)},
{"empty array", ?_assert(to_term(<<"[]">>, []) =:= [])},
{"list of lists", ?_assertEqual(to_term(<<"[[],[],[]]">>, []), [[], [], []])},
{"list of strings", ?_assertEqual(to_term(<<"[\"hi\", \"there\"]">>, []), [<<"hi">>, <<"there">>])},
{"list of numbers", ?_assertEqual(to_term(<<"[1, 2.0, 3e4, -5]">>, []), [1, 2.0, 3.0e4, -5])},
{"list of literals", ?_assertEqual(to_term(<<"[true,false,null]">>, []), [true,false,null])},
{"list of objects", ?_assertEqual(
to_term(<<"[{}, {\"a\":1, \"b\":2}, {\"key\":[true,false]}]">>, []),
[[{}], [{<<"a">>,1},{<<"b">>,2}], [{<<"key">>,[true,false]}]]
{"post decode wrong arity", ?_assertError(badarg, parse_config([{post_decode, G}]))},
{"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
{"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
].
format_key_test_() ->
[
{"binary key", ?_assertEqual(<<"key">>, format_key(<<"key">>, #config{labels=binary}))},
{"atom key", ?_assertEqual(key, format_key(<<"key">>, #config{labels=atom}))},
{"existing atom key", ?_assertEqual(
key,
format_key(<<"key">>, #config{labels=existing_atom})
)},
{"nonexisting atom key", ?_assertError(
badarg,
format_key(<<"nonexistentatom">>, #config{labels=existing_atom})
)}
].
comprehensive_test_() ->
{"comprehensive test", ?_assertEqual(to_term(comp_json(), []), comp_term())}.
comp_json() ->
<<"[
{\"a key\": {\"a key\": -17.346, \"another key\": 3e152, \"last key\": 14}},
[0,1,2,3,4,5],
[{\"a\": \"a\", \"b\": \"b\"}, {\"c\": \"c\", \"d\": \"d\"}],
[true, false, null],
{},
[],
[{},{}],
{\"key\": [], \"another key\": {}}
]">>.
comp_term() ->
[
[{<<"a key">>, [{<<"a key">>, -17.346}, {<<"another key">>, 3.0e152}, {<<"last key">>, 14}]}],
[0,1,2,3,4,5],
[[{<<"a">>, <<"a">>}, {<<"b">>, <<"b">>}], [{<<"c">>, <<"c">>}, {<<"d">>, <<"d">>}]],
[true, false, null],
[{}],
[],
[[{}], [{}]],
[{<<"key">>, []}, {<<"another key">>, [{}]}]
].
atom_labels_test_() ->
{"atom labels test", ?_assertEqual(to_term(comp_json(), [{labels, atom}]), atom_term())}.
atom_term() ->
[
[{'a key', [{'a key', -17.346}, {'another key', 3.0e152}, {'last key', 14}]}],
[0,1,2,3,4,5],
[[{a, <<"a">>}, {b, <<"b">>}], [{'c', <<"c">>}, {'d', <<"d">>}]],
[true, false, null],
[{}],
[],
[[{}], [{}]],
[{key, []}, {'another key', [{}]}]
].
naked_test_() ->
[
{"naked integer", ?_assertEqual(to_term(<<"123">>, []), 123)},
{"naked float", ?_assertEqual(to_term(<<"-4.32e-17">>, []), -4.32e-17)},
{"naked literal", ?_assertEqual(to_term(<<"true">>, []), true)},
{"naked string", ?_assertEqual(to_term(<<"\"string\"">>, []), <<"string">>)}
].
post_decoders_test_() ->
JSON = <<"{\"object\": {
\"literals\": [true, false, null],
\"strings\": [\"foo\", \"bar\", \"baz\"],
\"numbers\": [1, 1.0, 1e0]
}}">>,
Events = [
[{}],
[{<<"key">>, <<"value">>}],
[{<<"true">>, true}, {<<"false">>, false}, {<<"null">>, null}],
[],
[<<"string">>],
[true, false, null],
true,
false,
null,
<<"hello">>,
<<"world">>,
1,
1.0
],
[
{"no post_decode", ?_assertEqual(
to_term(JSON, []),
[{<<"object">>, [
{<<"literals">>, [true, false, null]},
{<<"strings">>, [<<"foo">>, <<"bar">>, <<"baz">>]},
{<<"numbers">>, [1, 1.0, 1.0]}
]}]
Events,
[ post_decode(Event, #config{}) || Event <- Events ]
)},
{"replace arrays with empty arrays", ?_assertEqual(
to_term(JSON, [{post_decode, fun([T|_] = V) when is_tuple(T) -> V; (V) when is_list(V) -> []; (V) -> V end}]),
[{<<"object">>, [{<<"literals">>, []}, {<<"strings">>, []}, {<<"numbers">>, []}]}]
[
[{}],
[{<<"key">>, <<"value">>}],
[{<<"true">>, true}, {<<"false">>, false}, {<<"null">>, null}],
[],
[],
[],
true,
false,
null,
<<"hello">>,
<<"world">>,
1,
1.0
],
[ post_decode(Event, #config{
post_decode=fun([T|_] = V) when is_tuple(T) -> V; (V) when is_list(V) -> []; (V) -> V end
}) || Event <- Events
]
)},
{"replace objects with empty objects", ?_assertEqual(
to_term(JSON, [{post_decode, fun(V) when is_list(V) -> [{}]; (V) -> V end}]),
[{}]
[
[{}],
[{}],
[{}],
[],
[<<"string">>],
[true, false, null],
true,
false,
null,
<<"hello">>,
<<"world">>,
1,
1.0
],
[ post_decode(Event, #config{
post_decode=fun([T|_]) when is_tuple(T) -> [{}]; (V) -> V end
}) || Event <- Events
]
)},
{"replace all non-list values with false", ?_assertEqual(
to_term(JSON, [{post_decode, fun(V) when is_list(V) -> V; (_) -> false end}]),
[{<<"object">>, [
{<<"literals">>, [false, false, false]},
{<<"strings">>, [false, false, false]},
{<<"numbers">>, [false, false, false]}
]}]
{"replace all non-array/non-object values with false", ?_assertEqual(
[
[{}],
[{<<"key">>, <<"value">>}],
[{<<"true">>, true}, {<<"false">>, false}, {<<"null">>, null}],
[],
[<<"string">>],
[true, false, null],
false,
false,
false,
false,
false,
false,
false
],
[ post_decode(Event, #config{
post_decode=fun(V) when is_list(V) -> V; (_) -> false end
}) || Event <- Events
]
)},
{"atoms_to_strings", ?_assertEqual(
to_term(JSON, [{post_decode, fun(V) when is_atom(V) -> unicode:characters_to_binary(atom_to_list(V)); (V) -> V end}]),
[{<<"object">>, [
{<<"literals">>, [<<"true">>, <<"false">>, <<"null">>]},
{<<"strings">>, [<<"foo">>, <<"bar">>, <<"baz">>]},
{<<"numbers">>, [1, 1.0, 1.0]}
]}]
[
[{}],
[{<<"key">>, <<"value">>}],
[{<<"true">>, true}, {<<"false">>, false}, {<<"null">>, null}],
[],
[<<"string">>],
[true, false, null],
<<"true">>,
<<"false">>,
<<"null">>,
<<"hello">>,
<<"world">>,
1,
1.0
],
[ post_decode(Event, #config{
post_decode=fun(V) when is_atom(V) -> unicode:characters_to_binary(atom_to_list(V)); (V) -> V end
}) || Event <- Events
]
)}
].
handle_event_test_() ->
Data = jsx:test_cases(),
[
{
Title, ?_assertEqual(
Term,
lists:foldl(fun handle_event/2, {[[]], #config{}}, Events ++ [end_json])
)
} || {Title, _, Term, Events} <- Data
].
-endif.

File diff suppressed because it is too large Load diff

View file

@ -27,65 +27,65 @@
-export([init/1, handle_event/2]).
-record(opts, {
-record(config, {
repeated_keys = true
}).
-type opts() :: [].
-type config() :: [].
-spec is_json(Source::binary(), Opts::opts()) -> true | false.
-spec is_json(Source::binary(), Config::config()) -> true | false.
is_json(Source, Opts) when is_list(Opts) ->
try (jsx:decoder(?MODULE, Opts, jsx_utils:extract_opts(Opts)))(Source)
is_json(Source, Config) when is_list(Config) ->
try (jsx:decoder(?MODULE, Config, jsx_utils:extract_config(Config)))(Source)
catch error:badarg -> false
end.
-spec is_term(Source::any(), Opts::opts()) -> true | false.
-spec is_term(Source::any(), Config::config()) -> true | false.
is_term(Source, Opts) when is_list(Opts) ->
try (jsx:encoder(?MODULE, Opts, jsx_utils:extract_opts(Opts)))(Source)
is_term(Source, Config) when is_list(Config) ->
try (jsx:encoder(?MODULE, Config, jsx_utils:extract_config(Config)))(Source)
catch error:badarg -> false
end.
parse_opts(Opts) -> parse_opts(Opts, #opts{}).
parse_config(Config) -> parse_config(Config, #config{}).
parse_opts([{repeated_keys, Val}|Rest], Opts) when Val == true; Val == false ->
parse_opts(Rest, Opts#opts{repeated_keys = Val});
parse_opts([repeated_keys|Rest], Opts) ->
parse_opts(Rest, Opts#opts{repeated_keys = true});
parse_opts([{K, _}|Rest] = Options, Opts) ->
parse_config([{repeated_keys, Val}|Rest], Config) when Val == true; Val == false ->
parse_config(Rest, Config#config{repeated_keys = Val});
parse_config([repeated_keys|Rest], Config) ->
parse_config(Rest, Config#config{repeated_keys = true});
parse_config([{K, _}|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([K|Rest] = Options, Opts) ->
parse_config([K|Rest] = Options, Config) ->
case lists:member(K, jsx_utils:valid_flags()) of
true -> parse_opts(Rest, Opts)
; false -> erlang:error(badarg, [Options, Opts])
true -> parse_config(Rest, Config)
; false -> erlang:error(badarg, [Options, Config])
end;
parse_opts([], Opts) ->
Opts.
parse_config([], Config) ->
Config.
init(Opts) -> {parse_opts(Opts), []}.
init(Config) -> {parse_config(Config), []}.
handle_event(end_json, _) -> true;
handle_event(_, {Opts, _} = State) when Opts#opts.repeated_keys == true -> State;
handle_event(_, {Config, _} = State) when Config#config.repeated_keys == true -> State;
handle_event(start_object, {Opts, Keys}) -> {Opts, [dict:new()] ++ Keys};
handle_event(end_object, {Opts, [_|Keys]}) -> {Opts, Keys};
handle_event(start_object, {Config, Keys}) -> {Config, [dict:new()] ++ Keys};
handle_event(end_object, {Config, [_|Keys]}) -> {Config, Keys};
handle_event({key, Key}, {Opts, [CurrentKeys|Keys]}) ->
handle_event({key, Key}, {Config, [CurrentKeys|Keys]}) ->
case dict:is_key(Key, CurrentKeys) of
true -> erlang:error(badarg)
; false -> {Opts, [dict:store(Key, blah, CurrentKeys)|Keys]}
; false -> {Config, [dict:store(Key, blah, CurrentKeys)|Keys]}
end;
handle_event(_, State) -> State.
@ -97,18 +97,32 @@ handle_event(_, State) -> State.
-include_lib("eunit/include/eunit.hrl").
opts_test_() ->
config_test_() ->
[
{"empty opts", ?_assertEqual(#opts{}, parse_opts([]))},
{"bare repeated keys", ?_assertEqual(#opts{}, parse_opts([repeated_keys]))},
{"empty config", ?_assertEqual(#config{}, parse_config([]))},
{"bare repeated keys", ?_assertEqual(#config{}, parse_config([repeated_keys]))},
{"repeated keys true", ?_assertEqual(
#opts{},
parse_opts([{repeated_keys, true}])
#config{},
parse_config([{repeated_keys, true}])
)},
{"repeated keys false", ?_assertEqual(
#opts{repeated_keys=false},
parse_opts([{repeated_keys, false}])
)}
#config{repeated_keys=false},
parse_config([{repeated_keys, false}])
)},
{"invalid opt flag", ?_assertError(badarg, parse_config([error]))},
{"invalid opt tuple", ?_assertError(badarg, parse_config([{error, true}]))}
].
handle_event_test_() ->
Data = jsx:test_cases(),
[
{
Title, ?_assertEqual(
true,
lists:foldl(fun handle_event/2, {#config{}, []}, Events ++ [end_json])
)
} || {Title, _, _, Events} <- Data
].