Merge branch 'eunit'

alisdair sullivan 2010-08-11 19:28:56 -07:00
commit dfe880e09f
44 changed files with 2207 additions and 246 deletions

View file

@ -11,7 +11,8 @@
jsx_utf32le,
jsx_eep0018,
jsx_format,
jsx_verify
jsx_verify,
jsx_test
]},
{registered, []},
{applications, [

View file

@ -5,10 +5,7 @@ expand:
./priv/backends.escript create
test: compile
./test/jsx_test.escript test/cases
prove: compile
prove ./test/jsx_test.escript
./rebar eunit
clean:
./rebar clean

View file

@ -35,6 +35,7 @@
-include("./include/jsx_types.hrl").
-spec parse(JSON::json(), Opts::jsx_opts()) -> jsx_parser_result().
parse(JSON, Opts) ->

rebar.config (new file, 6 lines)
View file

@ -0,0 +1,6 @@
%% edit eunit_test_path if you want to run your own tests; use "../" not "./", as
%% rebar changes the working dir to .eunit when running tests
{eunit_compile_opts, [{d, test}, {d, eunit_test_path, "../test/cases"}]}.
%% uncomment to get verbose output from test suite
{eunit_opts, [verbose]}.
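For anyone pointing the suite at a different set of cases, a minimal sketch of an edited rebar.config; the ../test/my_cases path is hypothetical and, as noted above, must be given relative to the .eunit directory:

%% sketch only: ../test/my_cases is a hypothetical directory
{eunit_compile_opts, [{d, test}, {d, eunit_test_path, "../test/my_cases"}]}.
{eunit_opts, [verbose]}.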

View file

@ -31,9 +31,11 @@
-export([is_json/1, is_json/2]).
-export([format/1, format/2]).
%% types for function specifications
-include("./include/jsx_types.hrl").
%% opts record
-record(opts, {
comments = false,
@ -43,6 +45,7 @@
}).
-spec parser() -> jsx_parser().
-spec parser(Opts::jsx_opts()) -> jsx_parser().

View file

@ -28,6 +28,10 @@
-include("./include/jsx_types.hrl").
-ifdef(test).
-include_lib("eunit/include/eunit.hrl").
-endif.
-spec json_to_term(JSON::binary(), Opts::decoder_opts()) -> json().
@ -40,24 +44,32 @@ json_to_term(JSON, Opts) ->
end.
%% the jsx formatter (pretty printer) can do most of the heavy lifting in converting erlang
%% terms to json strings, but it expects a jsx event iterator. luckily, the mapping from
%% erlang terms to jsx events is straightforward and the iterator can be faked with an
%% anonymous function
-spec term_to_json(JSON::json(), Opts::encoder_opts()) -> binary().
term_to_json(List, Opts) ->
case proplists:get_value(strict, Opts, true) of
true when is_list(List) -> continue
; false -> continue
; true -> erlang:error(badarg)
; false -> continue
end,
Encoding = proplists:get_value(encoding, Opts, utf8),
jsx:format(event_generator(lists:reverse(term_to_events(List))), [{output_encoding, Encoding}] ++ Opts).
%% fake the jsx api with a closure to be passed to the pretty printer
event_generator([]) ->
fun() -> {event, end_json, fun() -> {incomplete, fun(end_stream) -> ok end} end} end;
fun() -> {event, end_json, fun() -> {incomplete, fun(end_stream) -> event_generator([]) end} end} end;
event_generator([Next|Rest]) ->
fun() -> {event, Next, event_generator(Rest)} end.
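A minimal sketch (not part of the diff) of how these closures unroll when the pretty printer drives them, using a two-event list:

%% sketch only: stepping the faked iterator by hand
F0 = event_generator([start_array, end_array]),
{event, start_array, F1} = F0(),
{event, end_array, F2} = F1(),
{event, end_json, F3} = F2(),
{incomplete, More} = F3(),
_ = More(end_stream).   %% hands back another empty generator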
%% internal for json_to_term
%% parse opts for the decoder
opts_to_jsx_opts(Opts) ->
opts_to_jsx_opts(Opts, []).
@ -77,14 +89,19 @@ opts_to_jsx_opts([_|Rest], Acc) ->
opts_to_jsx_opts(Rest, Acc);
opts_to_jsx_opts([], Acc) ->
Acc.
%% ensure the first jsx event we get is start_object or start_array when running
%% in strict mode
collect_strict({event, Start, Next}, Acc, Opts) when Start =:= start_object; Start =:= start_array ->
collect(Next(), [[]|Acc], Opts);
collect_strict(_, _, _) ->
erlang:error(badarg).
%% collect decoder events and convert to eep0018 format
collect({event, Start, Next}, Acc, Opts) when Start =:= start_object; Start =:= start_array ->
collect(Next(), [[]|Acc], Opts);
@ -110,7 +127,7 @@ collect({event, end_json, _Next}, [[Acc]], _Opts) ->
%% the head of the accumulator and deal with it when we receive its paired value
collect({event, {key, _} = PreKey, Next}, [Current|_] = Acc, Opts) ->
Key = event(PreKey, Opts),
case key_repeats(Key, Current) of
case decode_key_repeats(Key, Current) of
true -> erlang:error(badarg)
; false -> collect(Next(), [Key] ++ Acc, Opts)
end;
@ -124,9 +141,19 @@ collect({event, Event, Next}, [Current|Rest], Opts) when is_list(Current) ->
collect({event, Event, Next}, [Key, Current|Rest], Opts) ->
collect(Next(), [[{Key, event(Event, Opts)}] ++ Current] ++ Rest, Opts);
%% if our first returned event is {incomplete, ...} try to force end and return the
%% Event if one is returned
collect({incomplete, More}, [[]], Opts) ->
case More(end_stream) of
{event, Event, _Next} -> event(Event, Opts)
; _ -> erlang:error(badarg)
end;
%% any other event is an error
collect(_, _, _) -> erlang:error(badarg).
%% helper functions for converting jsx events to eep0018 formats
event({string, String}, _Opts) ->
unicode:characters_to_binary(String);
@ -153,8 +180,14 @@ event({float, Float}, _Opts) ->
event({literal, Literal}, _Opts) ->
Literal.
decode_key_repeats(Key, [{Key, _Value}|_Rest]) -> true;
decode_key_repeats(Key, [_|Rest]) -> decode_key_repeats(Key, Rest);
decode_key_repeats(_Key, []) -> false.
%% internal for term_to_json
%% convert eep0018 representation to jsx events. note special casing for the empty object
term_to_events([{}]) ->
[end_object, start_object];
@ -169,8 +202,7 @@ term_to_events(Term) ->
proplist_to_events([{Key, Term}|Rest], Acc) ->
Event = term_to_event(Term),
EncodedKey = key_to_event(Key),
io:format("~p~n~p~n~n", [EncodedKey, Acc]),
case key_repeats(EncodedKey, Acc) of
case encode_key_repeats(EncodedKey, Acc) of
false -> proplist_to_events(Rest, Event ++ EncodedKey ++ Acc)
; true -> erlang:error(badarg)
end;
@ -205,6 +237,16 @@ key_to_event(Key) when is_atom(Key) ->
key_to_event(Key) when is_binary(Key) ->
[{key, json_escape(Key)}].
encode_key_repeats([Key], SoFar) -> encode_key_repeats(Key, SoFar, 0).
encode_key_repeats(Key, [Key|_], 0) -> true;
encode_key_repeats(Key, [end_object|Rest], Level) -> encode_key_repeats(Key, Rest, Level + 1);
encode_key_repeats(Key, [start_object|_], 0) -> false;
encode_key_repeats(Key, [start_object|Rest], Level) -> encode_key_repeats(Key, Rest, Level - 1);
encode_key_repeats(Key, [_|Rest], Level) -> encode_key_repeats(Key, Rest, Level);
encode_key_repeats(_, [], 0) -> false.
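To make the level counting concrete, a hand trace (not part of the diff) over a literal reversed accumulator, assuming keys appear as {key, Binary} events as key_to_event produces them:

%% sketch only: Acc is the reversed event stream for {"b": {"a": 1}} so far,
%% newest event first, as we encode further keys of the outer object
Acc = [end_object, {integer, "1"}, {key, <<"a">>}, start_object,
    {key, <<"b">>}, start_object],
false = encode_key_repeats([{key, <<"a">>}], Acc),
%% the nested {key, <<"a">>} is skipped: end_object raises the level to 1,
%% start_object drops it back to 0, and the walk stops at the outer
%% object's start_object without a match
true = encode_key_repeats([{key, <<"b">>}], Acc).
%% <<"b">> is found at level 0, so it counts as a repeat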
%% conversion of floats to 'nice' decimal output. erlang's float implementation is almost
%% but not quite ieee 754. it converts negative zero to plain zero silently, and throws
@ -300,6 +342,8 @@ pow(B, E, Acc) when E band 1 == 1 -> pow(B * B, E bsr 1, B * Acc);
pow(B, E, Acc) -> pow(B * B, E bsr 1, Acc).
format(0, Digits) ->
format(Digits, ignore, ".0");
format(Dpoint, Digits) when Dpoint =< length(Digits), Dpoint > 0 ->
format(Digits, Dpoint, []);
format(Dpoint, Digits) when Dpoint > 0 ->
@ -373,9 +417,71 @@ to_hex(10) -> $a;
to_hex(X) -> X + $0.
%% common functions
%% eunit tests
-ifdef(test).
key_repeats([{key, Key}], [{key, Key}|_Rest]) -> true;
key_repeats(Key, [{Key, _Value}|_Rest]) -> true;
key_repeats(Key, [_|Rest]) -> key_repeats(Key, Rest);
key_repeats(_Key, []) -> false.
decode_test_() ->
[
{"empty object", ?_assert(json_to_term(<<"{}">>, []) =:= [{}])},
{"empty array", ?_assert(json_to_term(<<"[]">>, []) =:= [])},
{"simple object", ?_assert(json_to_term(<<"{\"a\": true, \"b\": true, \"c\": true}">>, [{label, atom}]) =:= [{a, true}, {b, true}, {c, true}])},
{"simple array", ?_assert(json_to_term(<<"[true,true,true]">>, []) =:= [true, true, true])},
{"nested structures", ?_assert(json_to_term(<<"{\"list\":[{\"list\":[{}, {}],\"object\":{}}, []],\"object\":{}}">>, [{label, atom}]) =:= [{list, [[{list, [[{}], [{}]]}, {object, [{}]}],[]]}, {object, [{}]}])},
{"numbers", ?_assert(json_to_term(<<"[-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0]">>, []) =:= [-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0])},
{"numbers (all floats)", ?_assert(json_to_term(<<"[-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0]">>, [{float, true}]) =:= [-10000000000.0, -1.0, 0.0, 0.0, 1.0, 10000000000.0, 1000000000.0])},
{"strings", ?_assert(json_to_term(<<"[\"a string\"]">>, []) =:= [<<"a string">>])},
{"literals", ?_assert(json_to_term(<<"[true,false,null]">>, []) =:= [true,false,null])},
{"naked true", ?_assert(json_to_term(<<"true">>, [{strict, false}]) =:= true)},
{"naked short number", ?_assert(json_to_term(<<"1">>, [{strict, false}]) =:= 1)},
{"float", ?_assert(json_to_term(<<"1.0">>, [{strict, false}]) =:= 1.0)},
{"naked string", ?_assert(json_to_term(<<"\"hello world\"">>, [{strict, false}]) =:= <<"hello world">>)},
{"comments", ?_assert(json_to_term(<<"[ /* a comment in an empty array */ ]">>, [{comments, true}]) =:= [])}
].
encode_test_() ->
[
{"empty object", ?_assert(term_to_json([{}], []) =:= <<"{}">>)},
{"empty array", ?_assert(term_to_json([], []) =:= <<"[]">>)},
{"simple object", ?_assert(term_to_json([{a, true}, {b, true}, {c, true}], []) =:= <<"{\"a\":true,\"b\":true,\"c\":true}">>)},
{"simple array", ?_assert(term_to_json([true, true, true], []) =:= <<"[true,true,true]">>)},
{"nested structures", ?_assert(term_to_json([{list, [[{list, [[{}], [{}]]}, {object, [{}]}],[]]}, {object, [{}]}], []) =:= <<"{\"list\":[{\"list\":[{},{}],\"object\":{}},[]],\"object\":{}}">>)},
{"numbers", ?_assert(term_to_json([-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0], []) =:= <<"[-1.0e10,-1,0.0,0,1,10000000000,1.0e9]">>)},
{"strings", ?_assert(term_to_json([<<"a string">>], []) =:= <<"[\"a string\"]">>)},
{"literals", ?_assert(term_to_json([true,false,null], []) =:= <<"[true,false,null]">>)},
{"naked true", ?_assert(term_to_json(true, [{strict, false}]) =:= <<"true">>)},
{"naked number", ?_assert(term_to_json(1, [{strict, false}]) =:= <<"1">>)},
{"float", ?_assert(term_to_json(1.0, [{strict, false}]) =:= <<"1.0">>)},
{"naked string", ?_assert(term_to_json(<<"hello world">>, [{strict, false}]) =:= <<"\"hello world\"">>)}
].
repeated_keys_test_() ->
[
{"encode", ?_assertError(badarg, term_to_json([{k, true}, {k, false}], []))},
{"decode", ?_assertError(badarg, json_to_term(<<"{\"k\": true, \"k\": false}">>, []))}
].
escape_test_() ->
[
{"json string escaping", ?_assert(json_escape(<<"\"\\\b\f\n\r\t">>) =:= <<"\\\"\\\\\\b\\f\\n\\r\\t">>)},
{"json string hex escape", ?_assert(json_escape(<<1, 2, 3, 11, 26, 30, 31>>) =:= <<"\\u0001\\u0002\\u0003\\u000b\\u001a\\u001e\\u001f">>)}
].
nice_decimal_test_() ->
[
{"0.0", ?_assert(float_to_decimal(0.0) =:= "0.0")},
{"1.0", ?_assert(float_to_decimal(1.0) =:= "1.0")},
{"-1.0", ?_assert(float_to_decimal(-1.0) =:= "-1.0")},
{"3.1234567890987654321", ?_assert(float_to_decimal(3.1234567890987654321) =:= "3.1234567890987655")},
{"1.0e23", ?_assert(float_to_decimal(1.0e23) =:= "1.0e23")},
{"0.3", ?_assert(float_to_decimal(3.0/10.0) =:= "0.3")},
{"0.0001", ?_assert(float_to_decimal(0.0001) =:= "1.0e-4")},
{"0.00000001", ?_assert(float_to_decimal(0.00000001) =:= "1.0e-8")},
{"1.0e-323", ?_assert(float_to_decimal(1.0e-323) =:= "1.0e-323")},
{"1.0e308", ?_assert(float_to_decimal(1.0e308) =:= "1.0e308")},
{"min normalized float", ?_assert(float_to_decimal(math:pow(2, -1022)) =:= "2.2250738585072014e-308")},
{"max normalized float", ?_assert(float_to_decimal((2 - math:pow(2, -52)) * math:pow(2, 1023)) =:= "1.7976931348623157e308")},
{"min denormalized float", ?_assert(float_to_decimal(math:pow(2, -1074)) =:= "5.0e-324")},
{"max denormalized float", ?_assert(float_to_decimal((1 - math:pow(2, -52)) * math:pow(2, -1022)) =:= "2.225073858507201e-308")}
].
-endif.

View file

@ -26,14 +26,19 @@
-export([format/2]).
-include("./include/jsx_types.hrl").
-ifdef(test).
-include_lib("eunit/include/eunit.hrl").
-endif.
-record(opts, {
space = 0,
indent = 0,
output_encoding = iolist,
output_encoding = utf8,
strict = true
}).
@ -76,6 +81,8 @@ parse_opts([space|Rest], Opts) ->
parse_opts(Rest, Opts#opts{space = 1});
parse_opts([{output_encoding, Val}|Rest], Opts) ->
parse_opts(Rest, Opts#opts{output_encoding = Val});
parse_opts([_|Rest], Opts) ->
parse_opts(Rest, Opts);
parse_opts([], Opts) ->
Opts.
@ -85,11 +92,21 @@ extract_parser_opts(Opts) ->
format_something({event, start_object, Next}, Opts, Level) ->
{Continue, Object} = format_object(Next(), [], Opts, Level + 1),
{Continue, [?start_object, Object, ?end_object]};
case Next() of
{event, end_object, Continue} ->
{Continue, [?start_object, ?end_object]}
; Event ->
{Continue, Object} = format_object(Event, [], Opts, Level + 1),
{Continue, [?start_object, Object, indent(Opts, Level), ?end_object]}
end;
format_something({event, start_array, Next}, Opts, Level) ->
{Continue, Array} = format_array(Next(), [], Opts, Level + 1),
{Continue, [?start_array, Array, ?end_array]};
case Next() of
{event, end_array, Continue} ->
{Continue, [?start_array, ?end_array]}
; Event ->
{Continue, Object} = format_array(Event, [], Opts, Level + 1),
{Continue, [?start_array, Object, indent(Opts, Level), ?end_array]}
end;
format_something({event, {Type, Value}, Next}, _Opts, _Level) ->
{Next, [encode(Type, Value)]}.
@ -103,7 +120,7 @@ format_object({event, {key, Key}, Next}, Acc, Opts, Level) ->
{NextNext, [Acc, indent(Opts, Level), encode(string, Key), ?colon, space(Opts), Value]}
; Else ->
format_object(Else,
[Acc, indent(Opts, Level), encode(string, Key), ?colon, space(Opts), Value, ?comma],
[Acc, indent(Opts, Level), encode(string, Key), ?colon, space(Opts), Value, ?comma, space(Opts)],
Opts,
Level
)
@ -117,7 +134,7 @@ format_array(Event, Acc, Opts, Level) ->
{event, end_array, NextNext} ->
{NextNext, [Acc, indent(Opts, Level), Value]}
; Else ->
format_array(Else, [Acc, indent(Opts, Level), Value, ?comma], Opts, Level)
format_array(Else, [Acc, indent(Opts, Level), Value, ?comma, space(Opts)], Opts, Level)
end.
@ -157,4 +174,26 @@ space(Opts) ->
case Opts#opts.space of
0 -> []
; X when X > 0 -> [ ?space || _ <- lists:seq(1, X) ]
end.
end.
%% eunit tests
-ifdef(test).
minify_test_() ->
[
{"minify object", ?_assert(format(<<" { \"key\" :\n\t \"value\"\r\r\r\n } ">>, []) =:= <<"{\"key\":\"value\"}">>)},
{"minify array", ?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, []) =:= <<"[true,false,null]">>)}
].
opts_test_() ->
[
{"unspecified indent/space", ?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, [space, indent]) =:= <<"[\n true, \n false, \n null\n]">>)},
{"specific indent/space", ?_assert(format(<<"\n{\n\"key\" : [],\n\"another key\" : true\n}\n">>, [{space, 2}, {indent, 4}]) =:= <<"{\n \"key\": [], \n \"another key\": true\n}">>)},
{"nested structures", ?_assert(format(<<"[{\"key\":\"value\", \"another key\": \"another value\"}, [[true, false, null]]]">>, [{space, 2}, {indent, 2}]) =:= <<"[\n {\n \"key\": \"value\", \n \"another key\": \"another value\"\n }, \n [\n [\n true, \n false, \n null\n ]\n ]\n]">>)},
{"just spaces", ?_assert(format(<<"[1,2,3]">>, [{space, 2}]) =:= <<"[1, 2, 3]">>)},
{"just indent", ?_assert(format(<<"[1.0, 2.0, 3.0]">>, [{indent, 2}]) =:= <<"[\n 1.0,\n 2.0,\n 3.0\n]">>)}
].
-endif.

src/jsx_test.erl (new file, 170 lines)
View file

@ -0,0 +1,170 @@
%% The MIT License
%% Copyright (c) 2010 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
-module(jsx_test).
-author("alisdairsullivan@yahoo.ca").
-ifndef(test).
-export([test/0]).
-endif.
-ifdef(test).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% if not compiled with test support
-ifndef(test).
test() -> erlang:error(notest).
-else.
jsx_decoder_test_() ->
jsx_decoder_gen(load_tests(?eunit_test_path)).
jsx_decoder_gen([]) -> [];
jsx_decoder_gen(Tests) -> jsx_decoder_gen(Tests, [utf8, utf16, {utf16, little}, utf32, {utf32, little}]).
jsx_decoder_gen([_Test|Rest], []) ->
jsx_decoder_gen(Rest);
jsx_decoder_gen([Test|_] = Tests, [Encoding|Encodings]) ->
Name = lists:flatten(proplists:get_value(name, Test) ++ " :: " ++ io_lib:format("~p", [Encoding])),
JSON = unicode:characters_to_binary(proplists:get_value(json, Test), unicode, Encoding),
JSX = proplists:get_value(jsx, Test),
Flags = proplists:get_value(jsx_flags, Test, []),
{generator,
fun() ->
[{Name, ?_assert(decode(JSON, Flags) =:= JSX)}
| {generator,
fun() -> [{Name ++ " incremental", ?_assert(incremental_decode(JSON, Flags) =:= JSX)}
| jsx_decoder_gen(Tests, Encodings)]
end
}
]
end
}.
load_tests(Path) ->
%% search the specified directory for any files with the .test extension
TestSpecs = filelib:wildcard("*.test", Path),
load_tests(TestSpecs, Path, []).
load_tests([], _Dir, Acc) ->
lists:reverse(Acc);
load_tests([Test|Rest], Dir, Acc) ->
%% should alert about badly formed tests eventually, but for now just skip over them
case file:consult(Dir ++ "/" ++ Test) of
{ok, TestSpec} ->
try
ParsedTest = parse_tests(TestSpec, Dir),
load_tests(Rest, Dir, [ParsedTest] ++ Acc)
catch _:_ ->
load_tests(Rest, Dir, Acc)
end
; {error, _Reason} ->
load_tests(Rest, Dir, Acc)
end.
parse_tests(TestSpec, Dir) ->
parse_tests(TestSpec, Dir, []).
parse_tests([{json, Path}|Rest], Dir, Acc) when is_list(Path) ->
case file:read_file(Dir ++ "/" ++ Path) of
{ok, Bin} -> parse_tests(Rest, Dir, [{json, Bin}] ++ Acc)
; _ -> erlang:error(badarg)
end;
parse_tests([KV|Rest], Dir, Acc) ->
parse_tests(Rest, Dir, [KV] ++ Acc);
parse_tests([], _Dir, Acc) ->
Acc.
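For reference, the .test specs consumed here are plain file:consult/1 term files; the shape used throughout test/cases, copied from one of the updated cases later in this diff, is:

{name, "empty_array_with_comment"}.
{jsx, [start_array, end_array, end_json]}.
{json, "empty_array_with_comment.json"}.
{jsx_flags, [{comments, true}]}.   %% optional; defaults to []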
decode(JSON, Flags) ->
P = jsx:parser(Flags),
decode_loop(P(JSON), []).
decode_loop({event, end_json, _Next}, Acc) ->
lists:reverse([end_json] ++ Acc);
decode_loop({incomplete, More}, Acc) ->
decode_loop(More(end_stream), Acc);
decode_loop({event, E, Next}, Acc) ->
decode_loop(Next(), [E] ++ Acc).
incremental_decode(<<C:1/binary, Rest/binary>>, Flags) ->
P = jsx:parser(Flags),
incremental_decode_loop(P(C), Rest, []).
incremental_decode_loop({incomplete, Next}, <<>>, Acc) ->
incremental_decode_loop(Next(end_stream), <<>>, Acc);
incremental_decode_loop({incomplete, Next}, <<C:1/binary, Rest/binary>>, Acc) ->
incremental_decode_loop(Next(C), Rest, Acc);
incremental_decode_loop({event, end_json, _Next}, _Rest, Acc) ->
lists:reverse([end_json] ++ Acc);
incremental_decode_loop({event, Event, Next}, Rest, Acc) ->
incremental_decode_loop(Next(), Rest, [Event] ++ Acc).
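A small sketch (not in the diff) of what the two helpers return for the empty_array case, assuming empty_array.json contains just []:

%% sketch only: expected to agree with the empty_array .test spec
[start_array, end_array, end_json] = decode(<<"[]">>, []),
[start_array, end_array, end_json] = incremental_decode(<<"[]">>, []).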
multi_decode_test_() ->
[
{"multiple values in a single stream", ?_assert(multi_decode(multi_json_body(), []) =:= multi_test_result())}
].
multi_decode(JSON, Flags) ->
P = jsx:parser(Flags ++ [{multi_term, true}]),
multi_decode_loop(P(JSON), [[]]).
multi_decode_loop({incomplete, _Next}, [[]|Acc]) ->
lists:reverse(Acc);
multi_decode_loop({event, end_json, Next}, [S|Acc]) ->
multi_decode_loop(Next(), [[]|[lists:reverse(S)] ++ Acc]);
multi_decode_loop({event, E, Next}, [S|Acc]) ->
multi_decode_loop(Next(), [[E] ++ S] ++ Acc).
multi_json_body() ->
<<"0 1 -1 1e1 0.7 0.7e-1 true false null {} [] [1, 2, 3] \"hope this works\"">>.
multi_test_result() ->
[ [{integer, "0"}],
[{integer, "1"}],
[{integer, "-1"}],
[{float, "1.0e1"}],
[{float, "0.7"}],
[{float, "0.7e-1"}],
[{literal, true}],
[{literal, false}],
[{literal, null}],
[start_object, end_object],
[start_array, end_array],
[start_array, {integer, "1"}, {integer, "2"}, {integer, "3"}, end_array],
[{string, "hope this works"}]
].
-endif.

View file

@ -28,13 +28,18 @@
-include("./include/jsx_types.hrl").
-ifdef(test).
-include_lib("eunit/include/eunit.hrl").
-endif.
-spec is_json(JSON::binary(), Opts::verify_opts()) -> true | false.
is_json(JSON, Opts) ->
Encoding = proplists:get_value(encoding, Opts, utf8),
P = jsx:parser([{encoding, Encoding}]),
Comments = proplists:get_value(comments, Opts, false),
P = jsx:parser([{encoding, Encoding}, {comments, Comments}]),
case proplists:get_value(strict, Opts, true) of
true -> collect_strict(P(JSON), Opts)
; false -> collect(P(JSON), Opts)
@ -63,8 +68,51 @@ collect({event, {key, Key}, Next}, Keys) ->
collect({event, _, Next}, Keys) ->
collect(Next(), Keys);
%% needed to parse numbers that don't have trailing whitespace in less strict mode
collect({incomplete, More}, Keys) ->
collect(More(end_stream), Keys);
collect(_, _) ->
false.
%% eunit tests
-ifdef(test).
true_test_() ->
[
{"empty object", ?_assert(is_json(<<"{}">>, []) =:= true)},
{"empty array", ?_assert(is_json(<<"[]">>, []) =:= true)},
{"whitespace", ?_assert(is_json(<<" \n \t \r [true] \t \n\r ">>, []) =:= true)},
{"nested terms", ?_assert(is_json(<<"[ { \"key\": [ {}, {}, {} ], \"more key\": [{}] }, {}, [[[]]] ]">>, []) =:= true)},
{"numbers", ?_assert(is_json(<<"[ -1.0, -1, -0, 0, 1e-1, 1, 1.0, 1e1 ]">>, []) =:= true)},
{"strings", ?_assert(is_json(<<"[ \"a\", \"string\", \"in\", \"multiple\", \"acts\" ]">>, []) =:= true)},
{"literals", ?_assert(is_json(<<"[ true, false, null ]">>, []) =:= true)}
].
false_test_() ->
[
{"naked true", ?_assert(is_json(<<"true">>, []) =:= false)},
{"naked number", ?_assert(is_json(<<"1">>, []) =:= false)},
{"naked string", ?_assert(is_json(<<"\"i am not json\"">>, []) =:= false)},
{"unbalanced list", ?_assert(is_json(<<"[[[]]">>, []) =:= false)},
{"trailing comma", ?_assert(is_json(<<"[ true, false, null, ]">>, []) =:= false)},
{"unquoted key", ?_assert(is_json(<<"{ key: false }">>, []) =:= false)},
{"comments", ?_assert(is_json(<<"[ /* a comment */ ]">>, []) =:= false)}
].
less_strict_test_() ->
[
{"naked true", ?_assert(is_json(<<"true">>, [{strict, false}]) =:= true)},
{"naked number", ?_assert(is_json(<<"1">>, [{strict, false}]) =:= true)},
{"naked string", ?_assert(is_json(<<"\"i am not json\"">>, [{strict, false}]) =:= true)},
{"comments", ?_assert(is_json(<<"[ /* a comment */ ]">>, [{comments, true}]) =:= true)}
].
-endif.

File diff suppressed because one or more lines are too long

View file

@ -1 +1,29 @@
[start_array, {string, "foo"}, {string, "bar"}, {string, "baz"}, start_array, {literal, true}, end_array, start_array, {literal, false}, end_array, start_array, {literal, null}, end_array, {literal, true}, {literal, false}, {literal, null}, {float, "0.7"}, start_object, {key, "key"}, {string, "value"}, end_object, start_array, start_object, end_object, {literal, null}, {literal, null}, {literal, null}, start_array, end_array, end_array, {string, "\n\r\\"}, start_array, {integer, "-1"}, end_array, end_array, end_json].
{name, "array"}.
{jsx, [start_array,
{string,"foo"},
{string,"bar"},
{string,"baz"},
start_array,
{literal,true},
end_array,start_array,
{literal,false},
end_array,start_array,
{literal,null},
end_array,
{literal,true},
{literal,false},
{literal,null},
{float,"0.7"},
start_object,
{key,"key"},
{string,"value"},
end_object,start_array,start_object,end_object,
{literal,null},
{literal,null},
{literal,null},
start_array,end_array,end_array,
{string,"\n\r\\"},
start_array,
{integer,"-1"},
end_array,end_array,end_json]}.
{json, "array.json"}.

View file

@ -1,3 +1,14 @@
[start_array, {string, "a string"}, {integer, "1"}, start_object, {key, "key"}, start_array, end_array, {key, "another key"}, {integer, "0"}, end_object, {literal, true}, end_array, end_json].
[{comments, true}].
{name, "comments"}.
{jsx, [start_array,
{string,"a string"},
{integer,"1"},
start_object,
{key,"key"},
start_array,end_array,
{key,"another key"},
{integer,"0"},
end_object,
{literal,true},
end_array,end_json]}.
{json, "comments.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1 +1,3 @@
[start_array, start_array, start_array, end_array, end_array, end_array, end_json].
{name, "deep_array"}.
{jsx, [start_array,start_array,start_array,end_array,end_array,end_array,end_json]}.
{json, "deep_array.json"}.

View file

@ -1 +1,3 @@
[start_array, end_array, end_json].
{name, "empty_array"}.
{jsx, [start_array,end_array,end_json]}.
{json, "empty_array.json"}.

View file

@ -1,3 +1,4 @@
[start_array, end_array, end_json].
[{comments, true}].
{name, "empty_array_with_comment"}.
{jsx, [start_array,end_array,end_json]}.
{json, "empty_array_with_comment.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1 +1,3 @@
[start_object, end_object, end_json].
{name, "empty_object"}.
{jsx, [start_object,end_object,end_json]}.
{json, "empty_object.json"}.

View file

@ -1,3 +1,4 @@
[start_object, end_object, end_json].
[{comments, true}].
{name, "empty_object_with_comment"}.
{jsx, [start_object,end_object,end_json]}.
{json, "empty_object_with_comment.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1,3 +1,4 @@
[start_array, {string, [66560]}, end_array, end_json].
[{escaped_unicode, codepoint}].
{name, "encoded_surrogates"}.
{jsx, [start_array,{string,[66560]},end_array,end_json]}.
{json, "encoded_surrogates.json"}.
{jsx_flags, [{escaped_unicode,codepoint}]}.

View file

@ -1,3 +1,15 @@
[start_array, start_array, {float, "2.0e7"}, end_array, {float, "2.0e7"}, start_object, {key, "key"}, {float, "2.0e7"}, {key, "another key"}, {float, "2.0e7"}, end_object, {float, "4.2e70"}, end_array, end_json].
[{comments, true}].
{name, "exp"}.
{jsx, [start_array,start_array,
{float,"2.0e7"},
end_array,
{float,"2.0e7"},
start_object,
{key,"key"},
{float,"2.0e7"},
{key,"another key"},
{float,"2.0e7"},
end_object,
{float,"4.2e70"},
end_array,end_json]}.
{json, "exp.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1,3 +1,20 @@
[start_array, start_array, {float, "2.0"}, end_array, {float, "2.0"}, start_object, {key, "key"}, {float, "2.0e7"}, {key, "another key"}, {float, "2.0e7"}, end_object, start_object, {key, "key"}, {float, "2.0"}, {key, "another key"}, {float, "2.0"}, end_object, {float, "4.2"}, end_array, end_json].
[{comments, true}].
{name, "fraction"}.
{jsx, [start_array,start_array,
{float,"2.0"},
end_array,
{float,"2.0"},
start_object,
{key,"key"},
{float,"2.0e7"},
{key,"another key"},
{float,"2.0e7"},
end_object,start_object,
{key,"key"},
{float,"2.0"},
{key,"another key"},
{float,"2.0"},
end_object,
{float,"4.2"},
end_array,end_json]}.
{json, "fraction.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1 +1 @@
[[20], 20, {"key":20, "another key":20}, 42 ]
[[20], 20, {"key":20, "another key":20}, 42/*a comment*/ ]

View file

@ -1 +1,15 @@
[start_array, start_array, {integer, "20"}, end_array, {integer, "20"}, start_object, {key, "key"}, {integer, "20"}, {key, "another key"}, {integer, "20"}, end_object, {integer, "42"}, end_array, end_json].
{name, "integer"}.
{jsx, [start_array,start_array,
{integer,"20"},
end_array,
{integer,"20"},
start_object,
{key,"key"},
{integer,"20"},
{key,"another key"},
{integer,"20"},
end_object,
{integer,"42"},
end_array,end_json]}.
{json, "integer.json"}.
{jsx_flags, [{comments,true}]}.

View file

@ -1 +1,3 @@
[start_array,{string,[32, 119070, 32]},end_array,end_json].
{name, "multibyte_utf"}.
{jsx, [start_array,{string,[32,119070,32]},end_array,end_json]}.
{json, "multibyte_utf.json"}.

View file

@ -1 +1,3 @@
[{literal, false}, end_json].
{name, "naked_false"}.
{jsx, [{literal,false},end_json]}.
{json, "naked_false.json"}.

View file

@ -1 +1,3 @@
[{literal, null}, end_json].
{name, "naked_null"}.
{jsx, [{literal,null},end_json]}.
{json, "naked_null.json"}.

View file

@ -1 +1,3 @@
[{integer, "42"}, end_json].
{name, "naked_number_a"}.
{jsx, [{integer,"42"},end_json]}.
{json, "naked_number_a.json"}.

View file

@ -1 +1,3 @@
[{integer, "-42"}, end_json].
{name, "naked_number_b"}.
{jsx, [{integer,"-42"},end_json]}.
{json, "naked_number_b.json"}.

View file

@ -1 +1,3 @@
[{float, "-0.7"}, end_json].
{name, "naked_number_c"}.
{jsx, [{float,"-0.7"},end_json]}.
{json, "naked_number_c.json"}.

View file

@ -1 +1,3 @@
[{float, "0.7"}, end_json].
{name, "naked_number_d"}.
{jsx, [{float,"0.7"},end_json]}.
{json, "naked_number_d.json"}.

View file

@ -1 +1,3 @@
[{integer, "0"}, end_json].
{name, "naked_number_e"}.
{jsx, [{integer,"0"},end_json]}.
{json, "naked_number_e.json"}.

View file

@ -1 +1,3 @@
[{float, "1.0e100"}, end_json].
{name, "naked_number_f"}.
{jsx, [{float,"1.0e100"},end_json]}.
{json, "naked_number_f.json"}.

View file

@ -1 +1,3 @@
[{integer, "7"}, end_json].
{name, "naked_number_g"}.
{jsx, [{integer,"7"},end_json]}.
{json, "naked_number_g.json"}.

View file

@ -1 +1,3 @@
[{string, "this is a naked string"}, end_json].
{name, "naked_string"}.
{jsx, [{string,"this is a naked string"},end_json]}.
{json, "naked_string.json"}.

View file

@ -1 +1,3 @@
[{literal, true}, end_json].
{name, "naked_true"}.
{jsx, [{literal,true},end_json]}.
{json, "naked_true.json"}.

View file

@ -1 +1,14 @@
[start_array, start_array, {integer, "-0"}, end_array, {integer, "-0"}, start_object, {key, "key"}, {integer, "-0"}, {key, "another key"}, {integer, "-0"}, end_object, {integer, "-0"}, end_array, end_json].
{name, "negative_zero"}.
{jsx, [start_array,start_array,
{integer,"-0"},
end_array,
{integer,"-0"},
start_object,
{key,"key"},
{integer,"-0"},
{key,"another key"},
{integer,"-0"},
end_object,
{integer,"-0"},
end_array,end_json]}.
{json, "negative_zero.json"}.

View file

@ -1 +1,23 @@
[ start_array, {integer, "1"}, {integer, "2"}, {integer, "3"}, {integer, "4"}, {integer, "5"}, {integer, "6"}, {integer, "7"}, {integer, "8"}, {integer, "9"}, {integer, "42"}, {integer, "127"}, {integer, "99999999999999999999999999999"}, {float, "1.0e1"}, {float, "1.0e1"}, {float, "1.0e1"}, {float, "1.325e478534"}, {integer, "-1"}, {float, "-1.0e-1"}, {float, "3.7e-57834235"}, end_array, end_json].
{name, "numbers"}.
{jsx, [start_array,
{integer,"1"},
{integer,"2"},
{integer,"3"},
{integer,"4"},
{integer,"5"},
{integer,"6"},
{integer,"7"},
{integer,"8"},
{integer,"9"},
{integer,"42"},
{integer,"127"},
{integer,"99999999999999999999999999999"},
{float,"1.0e1"},
{float,"1.0e1"},
{float,"1.0e1"},
{float,"1.325e478534"},
{integer,"-1"},
{float,"-1.0e-1"},
{float,"3.7e-57834235"},
end_array,end_json]}.
{json, "numbers.json"}.

View file

@ -1 +1,22 @@
[start_object, {key, "foo"}, {string, "bar"}, {key, "baz"}, {literal, true}, {key, "false"}, {literal, null}, {key, "object"}, start_object, {key, "key"}, {string, "value"}, end_object, {key, "list"}, start_array, {literal, null}, {literal, null}, {literal, null}, start_array, end_array, {string, "\n\r\\"}, end_array, end_object, end_json].
{name, "object"}.
{jsx, [start_object,
{key,"foo"},
{string,"bar"},
{key,"baz"},
{literal,true},
{key,"false"},
{literal,null},
{key,"object"},
start_object,
{key,"key"},
{string,"value"},
end_object,
{key,"list"},
start_array,
{literal,null},
{literal,null},
{literal,null},
start_array,end_array,
{string,"\n\r\\"},
end_array,end_object,end_json]}.
{json, "object.json"}.

View file

@ -1 +1,5 @@
[start_array, {string, "this is a random string with \n embedded escapes in it"}, end_array, end_json].
{name, "string"}.
{jsx, [start_array,
{string,"this is a random string with \n embedded escapes in it"},
end_array,end_json]}.
{json, "string.json"}.

View file

@ -1,3 +1,12 @@
[start_array, {string, "\""}, {string, "\\"}, {string, "/"}, {string, "\b"}, {string, "\f"}, {string, "\n"}, {string, "\r"}, {string, "\t"}, end_array, end_json].
{name, "string_escapes"}.
{jsx, [start_array,
{string,"\""},
{string,"\\"},
{string,"/"},
{string,"\b"},
{string,"\f"},
{string,"\n"},
{string,"\r"},
{string,"\t"},
end_array,end_json]}.
{json, "string_escapes.json"}.

View file

@ -1,3 +1,7 @@
[start_array, {string, "arabic letter alef: "}, {string, [16#0627]}, end_array, end_json].
[{escaped_unicode, codepoint}].
{name, "unicode_to_codepoint"}.
{jsx, [start_array,
{string,"arabic letter alef: "},
{string,[1575]},
end_array,end_json]}.
{json, "unicode_to_codepoint.json"}.
{jsx_flags, [{escaped_unicode,codepoint}]}.

View file

@ -1,3 +1,7 @@
[start_array, {string, "arabic letter alef: "}, {string, "\\u0627"}, end_array, end_json].
[{escaped_unicode, ascii}].
{name, "unicode_unconverted"}.
{jsx, [start_array,
{string,"arabic letter alef: "},
{string,"\\u0627"},
end_array,end_json]}.
{json, "unicode_unconverted.json"}.
{jsx_flags, [{escaped_unicode,ascii}]}.

View file

@ -1 +1,3 @@
[start_array, {float, "0.3"}, end_array, end_json].
{name, "whitespace"}.
{jsx, [start_array,{float,"0.3"},end_array,end_json]}.
{json, "whitespace.json"}.

View file

@ -1 +1,14 @@
[start_array, start_array, {integer, "0"}, end_array, {integer, "0"}, start_object, {key, "key"}, {integer, "0"}, {key, "another key"}, {integer, "0"}, end_object, {integer, "0"}, end_array, end_json].
{name, "zero"}.
{jsx, [start_array,start_array,
{integer,"0"},
end_array,
{integer,"0"},
start_object,
{key,"key"},
{integer,"0"},
{key,"another key"},
{integer,"0"},
end_object,
{integer,"0"},
end_array,end_json]}.
{json, "zero.json"}.

View file

@ -1,166 +0,0 @@
#!/usr/bin/env escript
%% The MIT License
%% Copyright (c) 2010 Alisdair Sullivan <alisdairsullivan@yahoo.ca>
%% Permission is hereby granted, free of charge, to any person obtaining a copy
%% of this software and associated documentation files (the "Software"), to deal
%% in the Software without restriction, including without limitation the rights
%% to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
%% copies of the Software, and to permit persons to whom the Software is
%% furnished to do so, subject to the following conditions:
%% The above copyright notice and this permission notice shall be included in
%% all copies or substantial portions of the Software.
%% THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
%% IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
%% FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
%% AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
%% LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
%% OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
%% THE SOFTWARE.
-module(jsx_test).
-export([main/1]).
-define(to_json(X, Y, N),
etap:is(jsx:term_to_json(X), Y, N)
).
-define(to_erep(X, Y, N),
etap:is(jsx:json_to_term(X), Y, N)
).
main([]) ->
test("./test/cases");
main([Path]) ->
test(Path).
test(Dir) ->
code:add_path("ebin"),
ValidJSONTests = load_tests(Dir),
etap:plan((length(ValidJSONTests) * 10) + 13),
run_jsx_tests(ValidJSONTests),
etap:is(multi_decode(multi_json_body(), []), multi_test_result(), "multi terms"),
?to_erep(<<"{}">>, [{}], "empty object to erep"),
?to_json([{}], <<"{}">>, "empty object to json"),
?to_erep(<<"[]">>, [], "empty array to erep"),
?to_json([], <<"[]">>, "empty array to json"),
?to_erep(<<"{ \"key\": \"value\", \"another key\": [] }">>, [{<<"key">>, <<"value">>}, {<<"another key">>, []}], "object to erep"),
?to_json([{<<"key">>, <<"value">>}, {<<"another key">>, []}], <<"{\"key\":\"value\",\"another key\":[]}">>, "object to json"),
?to_erep(<<"[true, 1, -0.5e7, \"hello world\"]">>, [true, 1, -0.5e7, <<"hello world">>], "array to erep"),
?to_json([true, 1, -0.5e7, <<"hello world">>], <<"[true,1,-5000000.0,\"hello world\"]">>, "array to json"),
?to_erep(<<"[[[]]]">>, [[[]]], "deep array to erep"),
?to_json([[[]]], <<"[[[]]]">>, "deep array to json"),
?to_erep(<<"{\"a\":{\"a\":{\"a\":{}}}}">>, [{<<"a">>, [{<<"a">>, [{<<"a">>, [{}]}]}]}], "deep object to erep"),
?to_json([{<<"a">>, [{<<"a">>, [{<<"a">>, [{}]}]}]}], <<"{\"a\":{\"a\":{\"a\":{}}}}">>, "deep object to json"),
etap:end_tests().
load_tests(Dir) ->
TestSpecs = filelib:wildcard("*.test", Dir),
load_tests(TestSpecs, Dir, []).
load_tests([], _Dir, Acc) ->
lists:reverse(Acc);
load_tests([Test|Rest], Dir, Acc) ->
try
TestName = filename:basename(Test, ".test"),
{ok, JSON} = file:read_file(Dir ++ "/" ++ TestName ++ ".json"),
case file:consult(Dir ++ "/" ++ Test) of
{ok, [Events]} ->
load_tests(Rest, Dir, [{TestName, JSON, Events, []}] ++ Acc)
; {ok, [Events, Flags]} ->
load_tests(Rest, Dir, [{TestName, JSON, Events, Flags}] ++ Acc)
end
catch _:_ -> load_tests(Rest, Dir, Acc) end.
run_jsx_tests([]) ->
ok;
run_jsx_tests([{TestName, JSON, Events, Flags}|Rest]) ->
etap:is(decode(JSON, Flags), Events, TestName ++ ": utf8"),
etap:is(incremental_decode(JSON, Flags), Events, TestName ++ ": incremental utf8"),
etap:is(decode(to_utf16(JSON), Flags), Events, TestName ++ ": utf16"),
etap:is(incremental_decode(to_utf16(JSON), Flags), Events, TestName ++ ": incremental utf16"),
etap:is(decode(to_utf16le(JSON), Flags), Events, TestName ++ ": utf16le"),
etap:is(incremental_decode(to_utf16le(JSON), Flags), Events, TestName ++ ": incremental utf16le"),
etap:is(decode(to_utf32(JSON), Flags), Events, TestName ++ ": utf32"),
etap:is(incremental_decode(to_utf32(JSON), Flags), Events, TestName ++ ": incremental utf32"),
etap:is(decode(to_utf32le(JSON), Flags), Events, TestName ++ ": utf32le"),
etap:is(incremental_decode(to_utf32le(JSON), Flags), Events, TestName ++ ": incremental utf32le"),
run_jsx_tests(Rest).
decode(JSON, Flags) ->
P = jsx:parser(Flags),
decode_loop(P(JSON), []).
decode_loop({event, end_json, _Next}, Acc) ->
lists:reverse([end_json] ++ Acc);
decode_loop({incomplete, More}, Acc) ->
decode_loop(More(end_stream), Acc);
decode_loop({event, E, Next}, Acc) ->
decode_loop(Next(), [E] ++ Acc).
incremental_decode(<<C:1/binary, Rest/binary>>, Flags) ->
P = jsx:parser(Flags),
incremental_decode_loop(P(C), Rest, []).
incremental_decode_loop({incomplete, Next}, <<>>, Acc) ->
incremental_decode_loop(Next(end_stream), <<>>, Acc);
incremental_decode_loop({incomplete, Next}, <<C:1/binary, Rest/binary>>, Acc) ->
incremental_decode_loop(Next(C), Rest, Acc);
incremental_decode_loop({event, end_json, _Next}, _Rest, Acc) ->
lists:reverse([end_json] ++ Acc);
incremental_decode_loop({event, Event, Next}, Rest, Acc) ->
incremental_decode_loop(Next(), Rest, [Event] ++ Acc).
multi_decode(JSON, Flags) ->
P = jsx:parser(Flags ++ [{multi_term, true}]),
multi_decode_loop(P(JSON), [[]]).
multi_decode_loop({incomplete, _Next}, [[]|Acc]) ->
lists:reverse(Acc);
multi_decode_loop({event, end_json, Next}, [S|Acc]) ->
multi_decode_loop(Next(), [[]|[lists:reverse(S)] ++ Acc]);
multi_decode_loop({event, E, Next}, [S|Acc]) ->
multi_decode_loop(Next(), [[E] ++ S] ++ Acc).
to_utf16(Bin) -> unicode:characters_to_binary(Bin, utf8, utf16).
to_utf16le(Bin) -> unicode:characters_to_binary(Bin, utf8, {utf16,little}).
to_utf32(Bin) -> unicode:characters_to_binary(Bin, utf8, utf32).
to_utf32le(Bin) -> unicode:characters_to_binary(Bin, utf8, {utf32,little}).
multi_json_body() ->
<<"0 1 -1 1e1 0.7 0.7e-1 true false null {} [] [1, 2, 3] \"hope this works\"">>.
multi_test_result() ->
[ [{integer, "0"}],
[{integer, "1"}],
[{integer, "-1"}],
[{float, "1.0e1"}],
[{float, "0.7"}],
[{float, "0.7e-1"}],
[{literal, true}],
[{literal, false}],
[{literal, null}],
[start_object, end_object],
[start_array, end_array],
[start_array, {integer, "1"}, {integer, "2"}, {integer, "3"}, end_array],
[{string, "hope this works"}]
].