major whitespace updates to get line lengths down to < 80 characters whenever possible (a few tests ignore this). src/jsx_test.erl was not touched because it's shortly to be replaced
parent ac5e0e4fe3
commit a86dec09ef
6 changed files with 998 additions and 414 deletions
@@ -23,5 +23,9 @@
 -define(is_utf_encoding(X),
-    X == utf8; X == utf16; X == utf32; X == {utf16, little}; X == {utf32, little}
+    X == utf8
+    ; X == utf16
+    ; X == utf32
+    ; X == {utf16, little}
+    ; X == {utf32, little}
 ).
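The reflowed macro still expands to a single ';'-separated guard sequence, so call sites keep using it directly in a when clause. A hedged sketch of such a call site (the helper name to_binary/2 is made up here; jsx_format.erl later in this commit uses the macro the same way):

%% to_binary/2 is a hypothetical helper, not part of jsx
to_binary(Acc, UTF) when ?is_utf_encoding(UTF) ->
    unicode:characters_to_binary(Acc, utf8, UTF);
to_binary(_, _) ->
    erlang:error(badarg).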
File diff suppressed because it is too large

src/jsx.erl — 211 changed lines
@@ -26,7 +26,6 @@
 %% @version really, really beta
 %% @doc this module defines the interface to the jsx json parsing library

 -module(jsx).

@@ -70,7 +69,11 @@
 %% | {multi_term, true | false}
 %% | {encoding, auto | supported_utf()}.

-%% @type supported_utf() = utf8 | utf16 | {utf16, little} | utf32 | {utf32, little}.
+%% @type supported_utf() = utf8
+%% | utf16
+%% | {utf16, little}
+%% | utf32
+%% | {utf32, little}.

 %% @type eep0018() = eep0018_object() | eep0018_array().

@@ -79,7 +82,13 @@

 %% @type eep0018_key() = binary() | atom().

-%% @type eep0018_term() = eep0018_array() | eep0018_object() | eep0018_string() | eep0018_number() | true | false | null.
+%% @type eep0018_term() = eep0018_array()
+%% | eep0018_object()
+%% | eep0018_string()
+%% | eep0018_number()
+%% | true
+%% | false
+%% | null.

 %% @type eep0018_string() = binary().

@@ -127,7 +136,8 @@ parser() ->

 %% @spec parser(Opts::jsx_opts()) -> jsx_parser()
 %% @doc
-%% produces a function which takes a binary which may or may not represent an encoded json document and returns a generator
+%% produces a function which takes a binary which may or may not represent an
+%% encoded json document and returns a generator
 %%
 %% options:
 %% <ul>
@@ -137,17 +147,23 @@ parser() ->
 %% false</p></li>
 %%
 %% <li>{encoded_unicode, ascii | codepoint | none}
-%% <p>if a \uXXXX escape sequence is encountered within a key or string,
-%% this option controls how it is interpreted. none makes no attempt
-%% to interpret the value, leaving it unconverted. ascii will convert
-%% any value that falls within the ascii range. codepoint will convert
-%% any value that is a valid unicode codepoint. note that unicode
-%% non-characters (including badly formed surrogates) will never be
-%% converted. codepoint is the default</p></li>
+%% <p>if a \uXXXX escape sequence is encountered within a key or
+%% string, this option controls how it is interpreted. none makes no
+%% attempt to interpret the value, leaving it unconverted. ascii will
+%% convert any value that falls within the ascii range. codepoint will
+%% convert any value that is a valid unicode codepoint. note that
+%% unicode non-characters (including badly formed surrogates) will
+%% never be converted. codepoint is the default</p></li>
 %%
-%% <li>{encoding, auto | utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
-%% <p>attempt to parse the binary using the specified encoding. auto will
-%% auto detect any supported encoding and is the default</p></li>
+%% <li>{encoding, auto
+%% | utf8
+%% | utf16
+%% | {utf16, little}
+%% | utf32
+%% | {utf32, little}
+%% }
+%% <p>attempt to parse the binary using the specified encoding. auto
+%% will auto detect any supported encoding and is the default</p></li>
 %%
 %% <li>{multi_term, true | false}
 %% <p>usually, documents will be parsed in full before the end_json
@ -183,25 +199,32 @@ json_to_term(JSON) ->
|
||||||
%% options:
|
%% options:
|
||||||
%% <ul>
|
%% <ul>
|
||||||
%% <li>{strict, true | false}
|
%% <li>{strict, true | false}
|
||||||
%% <p>by default, attempting to convert unwrapped json values (numbers, strings and
|
%% <p>by default, attempting to convert unwrapped json values (numbers,
|
||||||
%% the atoms true, false and null) result in a badarg exception. if strict equals
|
%% strings and the atoms true, false and null) result in a badarg
|
||||||
%% false, these are instead decoded to their equivalent eep0018 value. default is
|
%% exception. if strict equals false, these are instead decoded to
|
||||||
%% false</p>
|
%% their equivalent eep0018 value. default is false</p>
|
||||||
%%
|
%%
|
||||||
%% <p>note that there is a problem of ambiguity when parsing unwrapped json
|
%% <p>note that there is a problem of ambiguity when parsing unwrapped
|
||||||
%% numbers that requires special handling</p>
|
%% json numbers that requires special handling</p>
|
||||||
%%
|
%%
|
||||||
%% <p>an unwrapped json number has no unambiguous end marker like a json object,
|
%% <p>an unwrapped json number has no unambiguous end marker like a
|
||||||
%% array or string. `1', `12' and `123' may all represent either a complete json
|
%% json object, array or string. `1', `12' and `123' may all represent
|
||||||
%% number or just the beginning of one. in this case, the parser will always
|
%% either a complete json number or just the beginning of one. in this
|
||||||
%% return `{incomplete, More}' rather than potentially terminate before input
|
%% case, the parser will always return `{incomplete, More}' rather than
|
||||||
%% is exhausted. to force termination, `More/1' may be called with the atom
|
%% potentially terminate before input is exhausted. to force
|
||||||
%% `end_stream' as it's argument. note also that numbers followed by whitespace
|
%% termination, `More/1' may be called with the atom `end_stream' as
|
||||||
%% will be parsed correctly</p></li>
|
%% it's argument. note also that numbers followed by whitespace will be
|
||||||
|
%% parsed correctly</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{encoding, auto | utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
|
%% <li>{encoding, auto
|
||||||
%% <p>assume the binary is encoded using the specified binary. default is auto, which
|
%% | utf8
|
||||||
%% attempts to autodetect the encoding</p></li>
|
%% | utf16
|
||||||
|
%% | {utf16, little}
|
||||||
|
%% | utf32
|
||||||
|
%% | {utf32, little}
|
||||||
|
%% }
|
||||||
|
%% <p>assume the binary is encoded using the specified binary. default
|
||||||
|
%% is auto, which attempts to autodetect the encoding</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{comments, true | false}
|
%% <li>{comments, true | false}
|
||||||
%% <p>if true, json documents that contain c style (/* ... */) comments
|
%% <p>if true, json documents that contain c style (/* ... */) comments
|
||||||
|
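To make the decoder options documented above concrete, a few shell calls adapted from the decode tests later in this diff (outputs are what those tests assert; an illustrative sketch rather than authoritative documentation):

1> jsx:json_to_term(<<"{\"a\": true, \"b\": true, \"c\": true}">>, [{label, atom}]).
[{a,true},{b,true},{c,true}]
2> jsx:json_to_term(<<"1">>, [{strict, false}]).
1
3> jsx:json_to_term(<<"[ /* comment */ ]">>, [{comments, true}]).
[]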
@ -230,29 +253,38 @@ term_to_json(JSON) ->
|
||||||
|
|
||||||
%% @spec term_to_json(JSON::eep0018(), Opts::encoder_opts()) -> binary()
|
%% @spec term_to_json(JSON::eep0018(), Opts::encoder_opts()) -> binary()
|
||||||
%% @doc
|
%% @doc
|
||||||
%% takes the erlang representation of a json object (as defined in eep0018) and returns a (binary encoded) json string
|
%% takes the erlang representation of a json object (as defined in eep0018) and
|
||||||
|
%% returns a (binary encoded) json string
|
||||||
%%
|
%%
|
||||||
%% options:
|
%% options:
|
||||||
%% <ul>
|
%% <ul>
|
||||||
%% <li>{strict, true | false}
|
%% <li>{strict, true | false}
|
||||||
%% <p>by default, attempting to convert unwrapped json values (numbers,
|
%% <p>by default, attempting to convert unwrapped json values (numbers,
|
||||||
%% strings and the atoms true, false and null) result in a badarg exception.
|
%% strings and the atoms true, false and null) result in a badarg
|
||||||
%% if strict equals false, these are instead json encoded. default is false</p></li>
|
%% exception. if strict equals false, these are instead json encoded.
|
||||||
|
%% default is false</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{encoding, utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
|
%% <li>{encoding, utf8
|
||||||
|
%% | utf16
|
||||||
|
%% | {utf16, little}
|
||||||
|
%% | utf32
|
||||||
|
%% | {utf32, little}
|
||||||
|
%% }
|
||||||
%% <p>the encoding of the resulting binary. default is utf8</p></li>
|
%% <p>the encoding of the resulting binary. default is utf8</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>space
|
%% <li>space
|
||||||
%% <p>space is equivalent to {space, 1}</p></li>
|
%% <p>space is equivalent to {space, 1}</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{space, N}
|
%% <li>{space, N}
|
||||||
%% <p>place N spaces after each colon and comma in the resulting binary. default is zero</p></li>
|
%% <p>place N spaces after each colon and comma in the resulting
|
||||||
|
%% binary. default is zero</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>indent
|
%% <li>indent
|
||||||
%% <p>indent is equivalent to {indent, 1}</p></li>
|
%% <p>indent is equivalent to {indent, 1}</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{indent, N}
|
%% <li>{indent, N}
|
||||||
%% <p>indent each 'level' of the json structure by N spaces. default is zero</p></li>
|
%% <p>indent each 'level' of the json structure by N spaces. default is
|
||||||
|
%% zero</p></li>
|
||||||
%% </ul>
|
%% </ul>
|
||||||
%% @end
|
%% @end
|
||||||
|
|
||||||
|
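The encoder side of the same API, lifted from the encode tests further down in this diff (illustrative only):

1> jsx:term_to_json([{a, true}, {b, true}, {c, true}], []).
<<"{\"a\":true,\"b\":true,\"c\":true}">>
2> jsx:term_to_json(1, [{strict, false}]).
<<"1">>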
@ -273,13 +305,19 @@ is_json(JSON) ->
|
||||||
%% options:
|
%% options:
|
||||||
%% <ul>
|
%% <ul>
|
||||||
%% <li>{strict, true | false}
|
%% <li>{strict, true | false}
|
||||||
%% <p>by default, unwrapped json values (numbers, strings and the atoms
|
%% <p>by default, unwrapped json values (numbers, strings and the
|
||||||
%% true, false and null) return false. if strict equals true, is_json
|
%% atoms true, false and null) return false. if strict equals true,
|
||||||
%% returns true. default is false</p></li>
|
%% is_json returns true. default is false</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{encoding, auto | utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
|
%% <li>{encoding, auto
|
||||||
%% <p>assume the binary is encoded using the specified binary. default is auto,
|
%% | utf8
|
||||||
%% which attempts to autodetect the encoding</p></li>
|
%% | utf16
|
||||||
|
%% | {utf16, little}
|
||||||
|
%% | utf32
|
||||||
|
%% | {utf32, little}
|
||||||
|
%% }
|
||||||
|
%% <p>assume the binary is encoded using the specified binary. default
|
||||||
|
%% is auto, which attempts to autodetect the encoding</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{comments, true | false}
|
%% <li>{comments, true | false}
|
||||||
%% <p>if true, json documents that contain c style (/* ... */) comments
|
%% <p>if true, json documents that contain c style (/* ... */) comments
|
||||||
|
@ -300,20 +338,32 @@ format(JSON) ->
|
||||||
|
|
||||||
%% @spec format(JSON::binary(), Opts::format_opts()) -> binary()
|
%% @spec format(JSON::binary(), Opts::format_opts()) -> binary()
|
||||||
%% @doc
|
%% @doc
|
||||||
%% formats a binary encoded json string according to the options chose. the defaults will produced a string stripped of all whitespace
|
%% formats a binary encoded json string according to the options chose. the
|
||||||
|
%% defaults will produced a string stripped of all whitespace
|
||||||
%%
|
%%
|
||||||
%% options:
|
%% options:
|
||||||
%% <ul>
|
%% <ul>
|
||||||
%% <li>{strict, true | false}
|
%% <li>{strict, true | false}
|
||||||
%% <p>by default, unwrapped json values (numbers, strings and the atoms
|
%% <p>by default, unwrapped json values (numbers, strings and the
|
||||||
%% true, false and null) result in an error. if strict equals true, they
|
%% atoms true, false and null) result in an error. if strict equals
|
||||||
%% are treated as valid json. default is false</p></li>
|
%% true, they are treated as valid json. default is false</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{encoding, auto | utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
|
%% <li>{encoding, auto
|
||||||
%% <p>assume the binary is encoded using the specified binary. default is auto,
|
%% | utf8
|
||||||
%% which attempts to autodetect the encoding</p></li>
|
%% | utf16
|
||||||
|
%% | {utf16, little}
|
||||||
|
%% | utf32
|
||||||
|
%% | {utf32, little}
|
||||||
|
%% }
|
||||||
|
%% <p>assume the binary is encoded using the specified binary. default
|
||||||
|
%% is auto, which attempts to autodetect the encoding</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{output_encoding, utf8 | utf16 | {utf16, little} | utf32 | {utf32, little} }
|
%% <li>{encoding, utf8
|
||||||
|
%% | utf16
|
||||||
|
%% | {utf16, little}
|
||||||
|
%% | utf32
|
||||||
|
%% | {utf32, little}
|
||||||
|
%% }
|
||||||
%% <p>the encoding of the resulting binary. default is utf8</p></li>
|
%% <p>the encoding of the resulting binary. default is utf8</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{comments, true | false}
|
%% <li>{comments, true | false}
|
||||||
|
@ -325,13 +375,15 @@ format(JSON) ->
|
||||||
%% <p>space is equivalent to {space, 1}</p></li>
|
%% <p>space is equivalent to {space, 1}</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{space, N}
|
%% <li>{space, N}
|
||||||
%% <p>place N spaces after each colon and comma in the resulting binary. default is zero</p></li>
|
%% <p>place N spaces after each colon and comma in the resulting
|
||||||
|
%% binary. default is zero</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>indent
|
%% <li>indent
|
||||||
%% <p>indent is equivalent to {indent, 1}</p></li>
|
%% <p>indent is equivalent to {indent, 1}</p></li>
|
||||||
%%
|
%%
|
||||||
%% <li>{indent, N}
|
%% <li>{indent, N}
|
||||||
%% <p>indent each 'level' of the json structure by N spaces. default is zero</p></li>
|
%% <p>indent each 'level' of the json structure by N spaces. default is
|
||||||
|
%% zero</p></li>
|
||||||
%% </ul>
|
%% </ul>
|
||||||
%% @end
|
%% @end
|
||||||
|
|
||||||
|
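A quick feel for the formatter options above, taken from the minify and opts tests near the end of this diff (a sketch, not exhaustive):

1> jsx:format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, []).
<<"[true,false,null]">>
2> jsx:format(<<"[1,2,3]">>, [{space, 2}]).
<<"[1, 2, 3]">>
3> jsx:format(<<"[1.0, 2.0, 3.0]">>, [{indent, 2}]).
<<"[\n  1.0,\n  2.0,\n  3.0\n]">>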
@ -340,9 +392,17 @@ format(JSON, Opts) ->
|
||||||
|
|
||||||
|
|
||||||
%% @spec eventify(List::list()) -> jsx_parser_result()
|
%% @spec eventify(List::list()) -> jsx_parser_result()
|
||||||
%% @doc fake the jsx api for any list. useful if you want to serialize a structure to json using the pretty printer, or verify a sequence could be valid json
|
%% @doc fake the jsx api for any list. useful if you want to serialize a
|
||||||
|
%% structure to json using the pretty printer, or verify a sequence could be
|
||||||
|
%% valid json
|
||||||
eventify([]) ->
|
eventify([]) ->
|
||||||
fun() -> {incomplete, fun(List) when is_list(List) -> eventify(List); (_) -> erlang:error(badarg) end} end;
|
fun() ->
|
||||||
|
{incomplete, fun(List) when is_list(List) ->
|
||||||
|
eventify(List)
|
||||||
|
; (_) ->
|
||||||
|
erlang:error(badarg)
|
||||||
|
end}
|
||||||
|
end;
|
||||||
eventify([Next|Rest]) ->
|
eventify([Next|Rest]) ->
|
||||||
fun() -> {event, Next, eventify(Rest)} end.
|
fun() -> {event, Next, eventify(Rest)} end.
|
||||||
|
|
||||||
|
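Since eventify/1 just wraps a list in the parser's fun-returning shape, it can be exercised directly; a hedged sketch of what the faked iterator hands back (event tuples as used elsewhere in this diff):

Events = [start_array, {integer, "1"}, {integer, "2"}, end_array, end_json],
F = jsx:eventify(Events),
{event, start_array, Next} = F(),
{event, {integer, "1"}, _} = Next().

The eep0018 encoder later in this commit leans on exactly this trick, handing the faked iterator to jsx:format/2 instead of a real parser.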
@@ -352,43 +412,50 @@ eventify([Next|Rest]) ->


 %% encoding detection
-%% first check to see if there's a bom, if not, use the rfc4627 method for determining
-%% encoding. this function makes some assumptions about the validity of the stream
-%% which may delay failure later than if an encoding is explicitly provided
+%% first check to see if there's a bom, if not, use the rfc4627 method for
+%% determining encoding. this function makes some assumptions about the
+%% validity of the stream which may delay failure later than if an encoding is
+%% explicitly provided

 detect_encoding(OptsList) ->
     fun(Stream) -> detect_encoding(Stream, OptsList) end.

 %% utf8 bom detection
-detect_encoding(<<16#ef, 16#bb, 16#bf, Rest/binary>>, Opts) -> (jsx_utf8:parser(Opts))(Rest);
-%% utf32-little bom detection (this has to come before utf16-little or it'll match that)
-detect_encoding(<<16#ff, 16#fe, 0, 0, Rest/binary>>, Opts) -> (jsx_utf32le:parser(Opts))(Rest);
+detect_encoding(<<16#ef, 16#bb, 16#bf, Rest/binary>>, Opts) ->
+    (jsx_utf8:parser(Opts))(Rest);
+%% utf32-little bom detection (this has to come before utf16-little or it'll
+%% match that)
+detect_encoding(<<16#ff, 16#fe, 0, 0, Rest/binary>>, Opts) ->
+    (jsx_utf32le:parser(Opts))(Rest);
 %% utf16-big bom detection
-detect_encoding(<<16#fe, 16#ff, Rest/binary>>, Opts) -> (jsx_utf16:parser(Opts))(Rest);
+detect_encoding(<<16#fe, 16#ff, Rest/binary>>, Opts) ->
+    (jsx_utf16:parser(Opts))(Rest);
 %% utf16-little bom detection
-detect_encoding(<<16#ff, 16#fe, Rest/binary>>, Opts) -> (jsx_utf16le:parser(Opts))(Rest);
+detect_encoding(<<16#ff, 16#fe, Rest/binary>>, Opts) ->
+    (jsx_utf16le:parser(Opts))(Rest);
 %% utf32-big bom detection
-detect_encoding(<<0, 0, 16#fe, 16#ff, Rest/binary>>, Opts) -> (jsx_utf32:parser(Opts))(Rest);
+detect_encoding(<<0, 0, 16#fe, 16#ff, Rest/binary>>, Opts) ->
+    (jsx_utf32:parser(Opts))(Rest);

 %% utf32-little null order detection
 detect_encoding(<<X, 0, 0, 0, _Rest/binary>> = JSON, Opts) when X =/= 0 ->
     (jsx_utf32le:parser(Opts))(JSON);
-%% utf16-big null order detection
-detect_encoding(<<0, X, 0, Y, _Rest/binary>> = JSON, Opts) when X =/= 0, Y =/= 0 ->
-    (jsx_utf16:parser(Opts))(JSON);
-%% utf16-little null order detection
-detect_encoding(<<X, 0, Y, 0, _Rest/binary>> = JSON, Opts) when X =/= 0, Y =/= 0 ->
-    (jsx_utf16le:parser(Opts))(JSON);
 %% utf32-big null order detection
 detect_encoding(<<0, 0, 0, X, _Rest/binary>> = JSON, Opts) when X =/= 0 ->
     (jsx_utf32:parser(Opts))(JSON);
+%% utf16-little null order detection
+detect_encoding(<<X, 0, _, 0, _Rest/binary>> = JSON, Opts) when X =/= 0 ->
+    (jsx_utf16le:parser(Opts))(JSON);
+%% utf16-big null order detection
+detect_encoding(<<0, X, 0, _, _Rest/binary>> = JSON, Opts) when X =/= 0 ->
+    (jsx_utf16:parser(Opts))(JSON);
 %% utf8 null order detection
 detect_encoding(<<X, Y, _Rest/binary>> = JSON, Opts) when X =/= 0, Y =/= 0 ->
     (jsx_utf8:parser(Opts))(JSON);

-%% a problem, to autodetect naked single digits' encoding, there is not enough data
-%% to conclusively determine the encoding correctly. below is an attempt to solve
-%% the problem
+%% a problem, to autodetect naked single digits' encoding, there is not enough
+%% data to conclusively determine the encoding correctly. below is an attempt
+%% to solve the problem
 detect_encoding(<<X>>, Opts) when X =/= 0 ->
     {incomplete,
         fun(end_stream) ->
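A note on the reordered null order clauses above: utf32-little is tried before utf16-little because a utf32-little stream starting with an ascii character (X, 0, 0, 0) also satisfies the looser utf16-little pattern (X, 0, _, 0). The byte layouts are easy to check; a hedged standalone sketch (the module name encoding_probe is made up for illustration):

-module(encoding_probe).
-export([demo/0]).

%% illustrative only: byte layouts of "42" in the encodings the
%% null order heuristic in detect_encoding/2 tries to tell apart
demo() ->
    LE16 = unicode:characters_to_binary("42", utf8, {utf16, little}),
    BE16 = unicode:characters_to_binary("42", utf8, utf16),
    LE32 = unicode:characters_to_binary("42", utf8, {utf32, little}),
    BE32 = unicode:characters_to_binary("42", utf8, utf32),
    <<$4, 0, $2, 0>> = LE16,          %% matches <<X, 0, _, 0, ...>>
    <<0, $4, 0, $2>> = BE16,          %% matches <<0, X, 0, _, ...>>
    <<$4, 0, 0, 0, _/binary>> = LE32, %% matches <<X, 0, 0, 0, ...>>
    <<0, 0, 0, $4, _/binary>> = BE32, %% matches <<0, 0, 0, X, ...>>
    ok.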
@ -33,7 +33,6 @@
|
||||||
|
|
||||||
-include("./include/jsx_common.hrl").
|
-include("./include/jsx_common.hrl").
|
||||||
|
|
||||||
|
|
||||||
-ifdef(TEST).
|
-ifdef(TEST).
|
||||||
-include_lib("eunit/include/eunit.hrl").
|
-include_lib("eunit/include/eunit.hrl").
|
||||||
-endif.
|
-endif.
|
||||||
|
@ -48,10 +47,10 @@ json_to_term(JSON, Opts) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
||||||
%% the jsx formatter (pretty printer) can do most of the heavy lifting in converting erlang
|
%% the jsx formatter (pretty printer) can do most of the heavy lifting in
|
||||||
%% terms to json strings, but it expects a jsx event iterator. luckily, the mapping from
|
%% converting erlang terms to json strings, but it expects a jsx event
|
||||||
%% erlang terms to jsx events is straightforward and the iterator can be faked with an
|
%% iterator. luckily, the mapping from erlang terms to jsx events is
|
||||||
%% anonymous function
|
%% straightforward and the iterator can be faked with an anonymous function
|
||||||
term_to_json(List, Opts) ->
|
term_to_json(List, Opts) ->
|
||||||
case proplists:get_value(strict, Opts, true) of
|
case proplists:get_value(strict, Opts, true) of
|
||||||
true when is_list(List) -> continue
|
true when is_list(List) -> continue
|
||||||
|
@ -59,7 +58,9 @@ term_to_json(List, Opts) ->
|
||||||
; false -> continue
|
; false -> continue
|
||||||
end,
|
end,
|
||||||
Encoding = proplists:get_value(encoding, Opts, utf8),
|
Encoding = proplists:get_value(encoding, Opts, utf8),
|
||||||
jsx:format(jsx:eventify(lists:reverse([end_json] ++ term_to_events(List))), [{output_encoding, Encoding}] ++ Opts).
|
jsx:format(jsx:eventify(lists:reverse([end_json] ++ term_to_events(List))),
|
||||||
|
[{output_encoding, Encoding}] ++ Opts
|
||||||
|
).
|
||||||
|
|
||||||
|
|
||||||
%% parse opts for the decoder
|
%% parse opts for the decoder
|
||||||
|
@ -67,7 +68,9 @@ opts_to_jsx_opts(Opts) ->
|
||||||
opts_to_jsx_opts(Opts, []).
|
opts_to_jsx_opts(Opts, []).
|
||||||
|
|
||||||
opts_to_jsx_opts([{encoding, Val}|Rest], Acc) ->
|
opts_to_jsx_opts([{encoding, Val}|Rest], Acc) ->
|
||||||
case lists:member(Val, [auto, utf8, utf16, {utf16, little}, utf32, {utf32, little}]) of
|
case lists:member(Val,
|
||||||
|
[auto, utf8, utf16, {utf16, little}, utf32, {utf32, little}]
|
||||||
|
) of
|
||||||
true -> opts_to_jsx_opts(Rest, [{encoding, Val}] ++ Acc)
|
true -> opts_to_jsx_opts(Rest, [{encoding, Val}] ++ Acc)
|
||||||
; false -> opts_to_jsx_opts(Rest, Acc)
|
; false -> opts_to_jsx_opts(Rest, Acc)
|
||||||
end;
|
end;
|
||||||
|
@ -85,22 +88,27 @@ opts_to_jsx_opts([], Acc) ->
|
||||||
|
|
||||||
%% ensure the first jsx event we get is start_object or start_array when running
|
%% ensure the first jsx event we get is start_object or start_array when running
|
||||||
%% in strict mode
|
%% in strict mode
|
||||||
collect_strict({event, Start, Next}, Acc, Opts) when Start =:= start_object; Start =:= start_array ->
|
collect_strict({event, Start, Next}, Acc, Opts)
|
||||||
|
when Start =:= start_object; Start =:= start_array ->
|
||||||
collect(Next(), [[]|Acc], Opts);
|
collect(Next(), [[]|Acc], Opts);
|
||||||
collect_strict(_, _, _) ->
|
collect_strict(_, _, _) ->
|
||||||
erlang:error(badarg).
|
erlang:error(badarg).
|
||||||
|
|
||||||
|
|
||||||
%% collect decoder events and convert to eep0018 format
|
%% collect decoder events and convert to eep0018 format
|
||||||
collect({event, Start, Next}, Acc, Opts) when Start =:= start_object; Start =:= start_array ->
|
collect({event, Start, Next}, Acc, Opts)
|
||||||
|
when Start =:= start_object; Start =:= start_array ->
|
||||||
collect(Next(), [[]|Acc], Opts);
|
collect(Next(), [[]|Acc], Opts);
|
||||||
%% special case for empty object
|
%% special case for empty object
|
||||||
collect({event, end_object, Next}, [[], Parent|Rest], Opts) when is_list(Parent) ->
|
collect({event, end_object, Next}, [[], Parent|Rest], Opts)
|
||||||
|
when is_list(Parent) ->
|
||||||
collect(Next(), [[[{}]] ++ Parent] ++ Rest, Opts);
|
collect(Next(), [[[{}]] ++ Parent] ++ Rest, Opts);
|
||||||
%% reverse the array/object accumulator before prepending it to it's parent
|
%% reverse the array/object accumulator before prepending it to it's parent
|
||||||
collect({event, end_object, Next}, [Current, Parent|Rest], Opts) when is_list(Parent) ->
|
collect({event, end_object, Next}, [Current, Parent|Rest], Opts)
|
||||||
|
when is_list(Parent) ->
|
||||||
collect(Next(), [[lists:reverse(Current)] ++ Parent] ++ Rest, Opts);
|
collect(Next(), [[lists:reverse(Current)] ++ Parent] ++ Rest, Opts);
|
||||||
collect({event, end_array, Next}, [Current, Parent|Rest], Opts) when is_list(Parent) ->
|
collect({event, end_array, Next}, [Current, Parent|Rest], Opts)
|
||||||
|
when is_list(Parent) ->
|
||||||
collect(Next(), [[lists:reverse(Current)] ++ Parent] ++ Rest, Opts);
|
collect(Next(), [[lists:reverse(Current)] ++ Parent] ++ Rest, Opts);
|
||||||
%% special case for empty object
|
%% special case for empty object
|
||||||
collect({event, end_object, Next}, [[], Key, Parent|Rest], Opts) ->
|
collect({event, end_object, Next}, [[], Key, Parent|Rest], Opts) ->
|
||||||
|
@ -110,24 +118,25 @@ collect({event, End, Next}, [Current, Key, Parent|Rest], Opts)
|
||||||
collect(Next(), [[{Key, lists:reverse(Current)}] ++ Parent] ++ Rest, Opts);
|
collect(Next(), [[{Key, lists:reverse(Current)}] ++ Parent] ++ Rest, Opts);
|
||||||
collect({event, end_json, _Next}, [[Acc]], _Opts) ->
|
collect({event, end_json, _Next}, [[Acc]], _Opts) ->
|
||||||
Acc;
|
Acc;
|
||||||
%% key can only be emitted inside of a json object, so just insert it directly into
|
%% key can only be emitted inside of a json object, so just insert it directly
|
||||||
%% the head of the accumulator and deal with it when we receive it's paired value
|
%% into the head of the accumulator and deal with it when we receive it's
|
||||||
|
%% paired value
|
||||||
collect({event, {key, _} = PreKey, Next}, [Current|_] = Acc, Opts) ->
|
collect({event, {key, _} = PreKey, Next}, [Current|_] = Acc, Opts) ->
|
||||||
Key = event(PreKey, Opts),
|
Key = event(PreKey, Opts),
|
||||||
case decode_key_repeats(Key, Current) of
|
case decode_key_repeats(Key, Current) of
|
||||||
true -> erlang:error(badarg)
|
true -> erlang:error(badarg)
|
||||||
; false -> collect(Next(), [Key] ++ Acc, Opts)
|
; false -> collect(Next(), [Key] ++ Acc, Opts)
|
||||||
end;
|
end;
|
||||||
%% check acc to see if we're inside an object or an array. because inside an object
|
%% check acc to see if we're inside an object or an array. because inside an
|
||||||
%% context the events that fall this far are always preceded by a key (which are
|
%% object context the events that fall this far are always preceded by a key
|
||||||
%% binaries or atoms), if Current is a list, we're inside an array, else, an
|
%% (which are binaries or atoms), if Current is a list, we're inside an array,
|
||||||
%% object
|
%% else, an object
|
||||||
collect({event, Event, Next}, [Current|Rest], Opts) when is_list(Current) ->
|
collect({event, Event, Next}, [Current|Rest], Opts) when is_list(Current) ->
|
||||||
collect(Next(), [[event(Event, Opts)] ++ Current] ++ Rest, Opts);
|
collect(Next(), [[event(Event, Opts)] ++ Current] ++ Rest, Opts);
|
||||||
collect({event, Event, Next}, [Key, Current|Rest], Opts) ->
|
collect({event, Event, Next}, [Key, Current|Rest], Opts) ->
|
||||||
collect(Next(), [[{Key, event(Event, Opts)}] ++ Current] ++ Rest, Opts);
|
collect(Next(), [[{Key, event(Event, Opts)}] ++ Current] ++ Rest, Opts);
|
||||||
%% if our first returned event is {incomplete, ...} try to force end and return the
|
%% if our first returned event is {incomplete, ...} try to force end and return
|
||||||
%% Event if one is returned
|
%% the Event if one is returned
|
||||||
collect({incomplete, More}, [[]], Opts) ->
|
collect({incomplete, More}, [[]], Opts) ->
|
||||||
case More(end_stream) of
|
case More(end_stream) of
|
||||||
{event, Event, _Next} -> event(Event, Opts)
|
{event, Event, _Next} -> event(Event, Opts)
|
||||||
|
@ -170,7 +179,8 @@ decode_key_repeats(_Key, []) -> false.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
%% convert eep0018 representation to jsx events. note special casing for the empty object
|
%% convert eep0018 representation to jsx events. note special casing for the
|
||||||
|
%% empty object
|
||||||
term_to_events([{}]) ->
|
term_to_events([{}]) ->
|
||||||
[end_object, start_object];
|
[end_object, start_object];
|
||||||
term_to_events([First|_] = List) when is_tuple(First) ->
|
term_to_events([First|_] = List) when is_tuple(First) ->
|
||||||
|
@ -203,7 +213,7 @@ list_to_events([], Acc) ->
|
||||||
term_to_event(List) when is_list(List) ->
|
term_to_event(List) when is_list(List) ->
|
||||||
term_to_events(List);
|
term_to_events(List);
|
||||||
term_to_event(Float) when is_float(Float) ->
|
term_to_event(Float) when is_float(Float) ->
|
||||||
[{float, float_to_decimal(Float)}];
|
[{float, nice_decimal(Float)}];
|
||||||
term_to_event(Integer) when is_integer(Integer) ->
|
term_to_event(Integer) when is_integer(Integer) ->
|
||||||
[{integer, erlang:integer_to_list(Integer)}];
|
[{integer, erlang:integer_to_list(Integer)}];
|
||||||
term_to_event(String) when is_binary(String) ->
|
term_to_event(String) when is_binary(String) ->
|
||||||
|
@ -222,24 +232,32 @@ key_to_event(Key) when is_binary(Key) ->
|
||||||
|
|
||||||
encode_key_repeats([Key], SoFar) -> encode_key_repeats(Key, SoFar, 0).
|
encode_key_repeats([Key], SoFar) -> encode_key_repeats(Key, SoFar, 0).
|
||||||
|
|
||||||
encode_key_repeats(Key, [Key|_], 0) -> true;
|
encode_key_repeats(Key, [Key|_], 0) ->
|
||||||
encode_key_repeats(Key, [end_object|Rest], Level) -> encode_key_repeats(Key, Rest, Level + 1);
|
true;
|
||||||
encode_key_repeats(_, [start_object|_], 0) -> false;
|
encode_key_repeats(Key, [end_object|Rest], Level) ->
|
||||||
encode_key_repeats(Key, [start_object|Rest], Level) -> encode_key_repeats(Key, Rest, Level - 1);
|
encode_key_repeats(Key, Rest, Level + 1);
|
||||||
encode_key_repeats(Key, [_|Rest], Level) -> encode_key_repeats(Key, Rest, Level);
|
encode_key_repeats(_, [start_object|_], 0) ->
|
||||||
encode_key_repeats(_, [], 0) -> false.
|
false;
|
||||||
|
encode_key_repeats(Key, [start_object|Rest], Level) ->
|
||||||
|
encode_key_repeats(Key, Rest, Level - 1);
|
||||||
|
encode_key_repeats(Key, [_|Rest], Level) ->
|
||||||
|
encode_key_repeats(Key, Rest, Level);
|
||||||
|
encode_key_repeats(_, [], 0) ->
|
||||||
|
false.
|
||||||
|
|
||||||
|
|
||||||
%% conversion of floats to 'nice' decimal output. erlang's float implementation is almost
|
%% conversion of floats to 'nice' decimal output. erlang's float implementation
|
||||||
%% but not quite ieee 754. it converts negative zero to plain zero silently, and throws
|
%% is almost but not quite ieee 754. it converts negative zero to plain zero
|
||||||
%% exceptions for any operations that would produce NaN or infinity. as far as I can
|
%% silently, and throws exceptions for any operations that would produce NaN
|
||||||
%% tell that is. trying to match against NaN or infinity binary patterns produces nomatch
|
%% or infinity. as far as I can tell that is. trying to match against NaN or
|
||||||
%% exceptions, and arithmetic operations produce badarg exceptions. with that in mind, this
|
%% infinity binary patterns produces nomatch exceptions, and arithmetic
|
||||||
%% function makes no attempt to handle special values (except for zero)
|
%% operations produce badarg exceptions. with that in mind, this function
|
||||||
|
%% makes no attempt to handle special values (except for zero)
|
||||||
|
|
||||||
%% algorithm from "Printing FLoating-Point Numbers Quickly and Accurately" by Burger & Dybvig
|
%% algorithm from "Printing FLoating-Point Numbers Quickly and Accurately" by
|
||||||
float_to_decimal(0.0) -> "0.0";
|
%% Burger & Dybvig
|
||||||
float_to_decimal(Num) when is_float(Num) ->
|
nice_decimal(0.0) -> "0.0";
|
||||||
|
nice_decimal(Num) when is_float(Num) ->
|
||||||
{F, E} = extract(<<Num:64/float>>),
|
{F, E} = extract(<<Num:64/float>>),
|
||||||
{R, S, MP, MM} = initial_vals(F, E),
|
{R, S, MP, MM} = initial_vals(F, E),
|
||||||
K = ceiling(math:log10(abs(Num)) - 1.0e-10),
|
K = ceiling(math:log10(abs(Num)) - 1.0e-10),
|
||||||
|
@ -315,7 +333,8 @@ generate(RT, S, MP, MM, Round) ->
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
||||||
%% this is not efficient at all and should be replaced with a lookup table probably
|
%% this is not efficient at all and should be replaced with a lookup table
|
||||||
|
%% probably
|
||||||
pow(_B, 0) -> 1;
|
pow(_B, 0) -> 1;
|
||||||
pow(B, E) when E > 0 -> pow(B, E, 1).
|
pow(B, E) when E > 0 -> pow(B, E, 1).
|
||||||
|
|
||||||
|
@ -331,8 +350,10 @@ format(Dpoint, Digits) when Dpoint =< length(Digits), Dpoint > 0 ->
|
||||||
format(Dpoint, Digits) when Dpoint > 0 ->
|
format(Dpoint, Digits) when Dpoint > 0 ->
|
||||||
Pad = Dpoint - length(Digits),
|
Pad = Dpoint - length(Digits),
|
||||||
case Pad of
|
case Pad of
|
||||||
X when X > 6 -> format(Digits, 1, []) ++ "e" ++ integer_to_list(Dpoint - 1)
|
X when X > 6 ->
|
||||||
; _ -> format(Digits ++ [ 0 || _ <- lists:seq(1, Pad)], Dpoint, [])
|
format(Digits, 1, []) ++ "e" ++ integer_to_list(Dpoint - 1)
|
||||||
|
; _ ->
|
||||||
|
format(Digits ++ [ 0 || _ <- lists:seq(1, Pad)], Dpoint, [])
|
||||||
end;
|
end;
|
||||||
format(Dpoint, Digits) when Dpoint < 0 ->
|
format(Dpoint, Digits) when Dpoint < 0 ->
|
||||||
format(Digits, 1, []) ++ "e" ++ integer_to_list(Dpoint - 1).
|
format(Digits, 1, []) ++ "e" ++ integer_to_list(Dpoint - 1).
|
||||||
|
@ -344,32 +365,41 @@ format([], ignore, Acc) ->
|
||||||
format(Digits, 0, Acc) ->
|
format(Digits, 0, Acc) ->
|
||||||
format(Digits, ignore, "." ++ Acc);
|
format(Digits, ignore, "." ++ Acc);
|
||||||
format([Digit|Digits], Dpoint, Acc) ->
|
format([Digit|Digits], Dpoint, Acc) ->
|
||||||
format(Digits, case Dpoint of ignore -> ignore; X -> X - 1 end, to_ascii(Digit) ++ Acc).
|
format(Digits,
|
||||||
|
case Dpoint of ignore -> ignore; X -> X - 1 end, to_ascii(Digit) ++ Acc
|
||||||
|
).
|
||||||
|
|
||||||
|
|
||||||
to_ascii(X) -> [X + 48]. %% ascii "1" is [49], "2" is [50], etc...
|
to_ascii(X) -> [X + 48]. %% ascii "1" is [49], "2" is [50], etc...
|
||||||
|
|
||||||
|
|
||||||
%% json string escaping, for utf8 binaries. escape the json control sequences to their
|
%% json string escaping, for utf8 binaries. escape the json control sequences to
|
||||||
%% json equivalent, escape other control characters to \uXXXX sequences, everything
|
%% their json equivalent, escape other control characters to \uXXXX sequences,
|
||||||
%% else should be a legal json string component
|
%% everything else should be a legal json string component
|
||||||
json_escape(String) ->
|
json_escape(String) ->
|
||||||
json_escape(String, <<>>).
|
json_escape(String, <<>>).
|
||||||
|
|
||||||
%% double quote
|
%% double quote
|
||||||
json_escape(<<$\", Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $\">>);
|
json_escape(<<$\", Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $\">>);
|
||||||
%% backslash \ reverse solidus
|
%% backslash \ reverse solidus
|
||||||
json_escape(<<$\\, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $\\>>);
|
json_escape(<<$\\, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $\\>>);
|
||||||
%% backspace
|
%% backspace
|
||||||
json_escape(<<$\b, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $b>>);
|
json_escape(<<$\b, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $b>>);
|
||||||
%% form feed
|
%% form feed
|
||||||
json_escape(<<$\f, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $f>>);
|
json_escape(<<$\f, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $f>>);
|
||||||
%% newline
|
%% newline
|
||||||
json_escape(<<$\n, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $n>>);
|
json_escape(<<$\n, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $n>>);
|
||||||
%% cr
|
%% cr
|
||||||
json_escape(<<$\r, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $r>>);
|
json_escape(<<$\r, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $r>>);
|
||||||
%% tab
|
%% tab
|
||||||
json_escape(<<$\t, Rest/binary>>, Acc) -> json_escape(Rest, <<Acc/binary, $\\, $t>>);
|
json_escape(<<$\t, Rest/binary>>, Acc) ->
|
||||||
|
json_escape(Rest, <<Acc/binary, $\\, $t>>);
|
||||||
%% other control characters
|
%% other control characters
|
||||||
json_escape(<<C/utf8, Rest/binary>>, Acc) when C >= 0, C < $\s ->
|
json_escape(<<C/utf8, Rest/binary>>, Acc) when C >= 0, C < $\s ->
|
||||||
json_escape(Rest, <<Acc/binary, (json_escape_sequence(C))/binary>>);
|
json_escape(Rest, <<Acc/binary, (json_escape_sequence(C))/binary>>);
|
||||||
|
@ -382,8 +412,8 @@ json_escape(_, _) ->
|
||||||
erlang:error(badarg).
|
erlang:error(badarg).
|
||||||
|
|
||||||
|
|
||||||
%% convert a codepoint to it's \uXXXX equiv. for laziness, this only handles codepoints
|
%% convert a codepoint to it's \uXXXX equiv. for laziness, this only handles
|
||||||
%% this module might escape, ie, control characters
|
%% codepoints this module might escape, ie, control characters
|
||||||
json_escape_sequence(C) when C < 16#20 ->
|
json_escape_sequence(C) when C < 16#20 ->
|
||||||
<<_:8, A:4, B:4>> = <<C:16>>, % first two hex digits are always zero
|
<<_:8, A:4, B:4>> = <<C:16>>, % first two hex digits are always zero
|
||||||
<<$\\, $u, $0, $0, (to_hex(A)), (to_hex(B))>>.
|
<<$\\, $u, $0, $0, (to_hex(A)), (to_hex(B))>>.
|
||||||
|
@ -405,64 +435,194 @@ decode_test_() ->
|
||||||
[
|
[
|
||||||
{"empty object", ?_assert(json_to_term(<<"{}">>, []) =:= [{}])},
|
{"empty object", ?_assert(json_to_term(<<"{}">>, []) =:= [{}])},
|
||||||
{"empty array", ?_assert(json_to_term(<<"[]">>, []) =:= [])},
|
{"empty array", ?_assert(json_to_term(<<"[]">>, []) =:= [])},
|
||||||
{"simple object", ?_assert(json_to_term(<<"{\"a\": true, \"b\": true, \"c\": true}">>, [{label, atom}]) =:= [{a, true}, {b, true}, {c, true}])},
|
{"simple object",
|
||||||
{"simple array", ?_assert(json_to_term(<<"[true,true,true]">>, []) =:= [true, true, true])},
|
?_assert(json_to_term(
|
||||||
{"nested structures", ?_assert(json_to_term(<<"{\"list\":[{\"list\":[{}, {}],\"object\":{}}, []],\"object\":{}}">>, [{label, atom}]) =:= [{list, [[{list, [[{}], [{}]]}, {object, [{}]}],[]]}, {object, [{}]}])},
|
<<"{\"a\": true, \"b\": true, \"c\": true}">>,
|
||||||
{"numbers", ?_assert(json_to_term(<<"[-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0]">>, []) =:= [-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0])},
|
[{label, atom}]
|
||||||
{"numbers (all floats)", ?_assert(json_to_term(<<"[-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0]">>, [{float, true}]) =:= [-10000000000.0, -1.0, 0.0, 0.0, 1.0, 10000000000.0, 1000000000.0])},
|
) =:= [{a, true}, {b, true}, {c, true}]
|
||||||
{"strings", ?_assert(json_to_term(<<"[\"a string\"]">>, []) =:= [<<"a string">>])},
|
)
|
||||||
{"literals", ?_assert(json_to_term(<<"[true,false,null]">>, []) =:= [true,false,null])},
|
},
|
||||||
{"naked true", ?_assert(json_to_term(<<"true">>, [{strict, false}]) =:= true)},
|
{"simple array",
|
||||||
{"naked short number", ?_assert(json_to_term(<<"1">>, [{strict, false}]) =:= 1)},
|
?_assert(json_to_term(<<"[true,true,true]">>,
|
||||||
|
[]
|
||||||
|
) =:= [true, true, true]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"nested structures",
|
||||||
|
?_assert(json_to_term(
|
||||||
|
<<"{\"x\":[{\"x\":[{}, {}],\"y\":{}}, []],\"y\":{}}">>,
|
||||||
|
[{label, atom}]
|
||||||
|
) =:= [{x, [[{x, [[{}], [{}]]}, {y, [{}]}],[]]}, {y, [{}]}]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"numbers",
|
||||||
|
?_assert(json_to_term(
|
||||||
|
<<"[-100000000.0, -1, 0.0, 0, 1, 100000000, 10000000.0]">>,
|
||||||
|
[]
|
||||||
|
) =:= [-100000000.0, -1, 0.0, 0, 1, 100000000, 10000000.0]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"numbers (all floats)",
|
||||||
|
?_assert(json_to_term(
|
||||||
|
<<"[-100000000.0, -1, 0.0, 0, 1, 1000, 10000000.0]">>,
|
||||||
|
[{float, true}]
|
||||||
|
) =:= [-100000000.0, -1.0, 0.0, 0.0, 1.0, 1000.0, 10000000.0]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"strings",
|
||||||
|
?_assert(json_to_term(<<"[\"a string\"]">>,
|
||||||
|
[]
|
||||||
|
) =:= [<<"a string">>])
|
||||||
|
},
|
||||||
|
{"literals",
|
||||||
|
?_assert(json_to_term(<<"[true,false,null]">>,
|
||||||
|
[]
|
||||||
|
) =:= [true,false,null]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"naked true",
|
||||||
|
?_assert(json_to_term(<<"true">>, [{strict, false}]) =:= true)
|
||||||
|
},
|
||||||
|
{"naked short number",
|
||||||
|
?_assert(json_to_term(<<"1">>, [{strict, false}]) =:= 1)
|
||||||
|
},
|
||||||
{"float", ?_assert(json_to_term(<<"1.0">>, [{strict, false}]) =:= 1.0)},
|
{"float", ?_assert(json_to_term(<<"1.0">>, [{strict, false}]) =:= 1.0)},
|
||||||
{"naked string", ?_assert(json_to_term(<<"\"hello world\"">>, [{strict, false}]) =:= <<"hello world">>)},
|
{"naked string",
|
||||||
{"comments", ?_assert(json_to_term(<<"[ /* a comment in an empty array */ ]">>, [{comments, true}]) =:= [])}
|
?_assert(json_to_term(<<"\"hello world\"">>,
|
||||||
|
[{strict, false}]
|
||||||
|
) =:= <<"hello world">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"comments",
|
||||||
|
?_assert(json_to_term(<<"[ /* a comment in an empty array */ ]">>,
|
||||||
|
[{comments, true}]
|
||||||
|
) =:= []
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
encode_test_() ->
|
encode_test_() ->
|
||||||
[
|
[
|
||||||
{"empty object", ?_assert(term_to_json([{}], []) =:= <<"{}">>)},
|
{"empty object", ?_assert(term_to_json([{}], []) =:= <<"{}">>)},
|
||||||
{"empty array", ?_assert(term_to_json([], []) =:= <<"[]">>)},
|
{"empty array", ?_assert(term_to_json([], []) =:= <<"[]">>)},
|
||||||
{"simple object", ?_assert(term_to_json([{a, true}, {b, true}, {c, true}], []) =:= <<"{\"a\":true,\"b\":true,\"c\":true}">>)},
|
{"simple object",
|
||||||
{"simple array", ?_assert(term_to_json([true, true, true], []) =:= <<"[true,true,true]">>)},
|
?_assert(term_to_json([{a, true}, {b, true}, {c, true}],
|
||||||
{"nested structures", ?_assert(term_to_json([{list, [[{list, [[{}], [{}]]}, {object, [{}]}],[]]}, {object, [{}]}], []) =:= <<"{\"list\":[{\"list\":[{},{}],\"object\":{}},[]],\"object\":{}}">>)},
|
[]
|
||||||
{"numbers", ?_assert(term_to_json([-10000000000.0, -1, 0.0, 0, 1, 10000000000, 1000000000.0], []) =:= <<"[-1.0e10,-1,0.0,0,1,10000000000,1.0e9]">>)},
|
) =:= <<"{\"a\":true,\"b\":true,\"c\":true}">>
|
||||||
{"strings", ?_assert(term_to_json([<<"a string">>], []) =:= <<"[\"a string\"]">>)},
|
)
|
||||||
{"literals", ?_assert(term_to_json([true,false,null], []) =:= <<"[true,false,null]">>)},
|
},
|
||||||
{"naked true", ?_assert(term_to_json(true, [{strict, false}]) =:= <<"true">>)},
|
{"simple array",
|
||||||
{"naked number", ?_assert(term_to_json(1, [{strict, false}]) =:= <<"1">>)},
|
?_assert(term_to_json([true, true, true],
|
||||||
|
[]
|
||||||
|
) =:= <<"[true,true,true]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"nested structures",
|
||||||
|
?_assert(term_to_json(
|
||||||
|
[{x, [[{x, [[{}], [{}]]}, {y, [{}]}],[]]}, {y, [{}]}],
|
||||||
|
[]
|
||||||
|
) =:= <<"{\"x\":[{\"x\":[{},{}],\"y\":{}},[]],\"y\":{}}">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"numbers",
|
||||||
|
?_assert(term_to_json(
|
||||||
|
[-10000000000.0, -1, 0.0, 0, 1, 10000000, 1000000000.0],
|
||||||
|
[]
|
||||||
|
) =:= <<"[-1.0e10,-1,0.0,0,1,10000000,1.0e9]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"strings",
|
||||||
|
?_assert(term_to_json([<<"a string">>],
|
||||||
|
[]
|
||||||
|
) =:= <<"[\"a string\"]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"literals",
|
||||||
|
?_assert(term_to_json([true,false,null],
|
||||||
|
[]
|
||||||
|
) =:= <<"[true,false,null]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"naked true",
|
||||||
|
?_assert(term_to_json(true, [{strict, false}]) =:= <<"true">>)
|
||||||
|
},
|
||||||
|
{"naked number",
|
||||||
|
?_assert(term_to_json(1, [{strict, false}]) =:= <<"1">>)
|
||||||
|
},
|
||||||
{"float", ?_assert(term_to_json(1.0, [{strict, false}]) =:= <<"1.0">>)},
|
{"float", ?_assert(term_to_json(1.0, [{strict, false}]) =:= <<"1.0">>)},
|
||||||
{"naked string", ?_assert(term_to_json(<<"hello world">>, [{strict, false}]) =:= <<"\"hello world\"">>)}
|
{"naked string",
|
||||||
|
?_assert(term_to_json(<<"hello world">>,
|
||||||
|
[{strict, false}]
|
||||||
|
) =:= <<"\"hello world\"">>
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
repeated_keys_test_() ->
|
repeated_keys_test_() ->
|
||||||
[
|
[
|
||||||
{"encode", ?_assertError(badarg, term_to_json([{k, true}, {k, false}], []))},
|
{"encode",
|
||||||
{"decode", ?_assertError(badarg, json_to_term(<<"{\"k\": true, \"k\": false}">>, []))}
|
?_assertError(badarg, term_to_json([{k, true}, {k, false}], []))
|
||||||
|
},
|
||||||
|
{"decode",
|
||||||
|
?_assertError(badarg, json_to_term(
|
||||||
|
<<"{\"k\": true, \"k\": false}">>,
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
escape_test_() ->
|
escape_test_() ->
|
||||||
[
|
[
|
||||||
{"json string escaping", ?_assert(json_escape(<<"\"\\\b\f\n\r\t">>) =:= <<"\\\"\\\\\\b\\f\\n\\r\\t">>)},
|
{"json string escaping",
|
||||||
{"json string hex escape", ?_assert(json_escape(<<1, 2, 3, 11, 26, 30, 31>>) =:= <<"\\u0001\\u0002\\u0003\\u000b\\u001a\\u001e\\u001f">>)}
|
?_assert(json_escape(
|
||||||
|
<<"\"\\\b\f\n\r\t">>
|
||||||
|
) =:= <<"\\\"\\\\\\b\\f\\n\\r\\t">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"json string hex escape",
|
||||||
|
?_assert(json_escape(
|
||||||
|
<<1, 2, 3, 11, 26, 30, 31>>
|
||||||
|
) =:= <<"\\u0001\\u0002\\u0003\\u000b\\u001a\\u001e\\u001f">>
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
nice_decimal_test_() ->
|
nice_decimal_test_() ->
|
||||||
[
|
[
|
||||||
{"0.0", ?_assert(float_to_decimal(0.0) =:= "0.0")},
|
{"0.0", ?_assert(nice_decimal(0.0) =:= "0.0")},
|
||||||
{"1.0", ?_assert(float_to_decimal(1.0) =:= "1.0")},
|
{"1.0", ?_assert(nice_decimal(1.0) =:= "1.0")},
|
||||||
{"-1.0", ?_assert(float_to_decimal(-1.0) =:= "-1.0")},
|
{"-1.0", ?_assert(nice_decimal(-1.0) =:= "-1.0")},
|
||||||
{"3.1234567890987654321", ?_assert(float_to_decimal(3.1234567890987654321) =:= "3.1234567890987655")},
|
{"3.1234567890987654321",
|
||||||
{"1.0e23", ?_assert(float_to_decimal(1.0e23) =:= "1.0e23")},
|
?_assert(
|
||||||
{"0.3", ?_assert(float_to_decimal(3.0/10.0) =:= "0.3")},
|
nice_decimal(3.1234567890987654321) =:= "3.1234567890987655")
|
||||||
{"0.0001", ?_assert(float_to_decimal(0.0001) =:= "1.0e-4")},
|
},
|
||||||
{"0.00000001", ?_assert(float_to_decimal(0.00000001) =:= "1.0e-8")},
|
{"1.0e23", ?_assert(nice_decimal(1.0e23) =:= "1.0e23")},
|
||||||
{"1.0e-323", ?_assert(float_to_decimal(1.0e-323) =:= "1.0e-323")},
|
{"0.3", ?_assert(nice_decimal(3.0/10.0) =:= "0.3")},
|
||||||
{"1.0e308", ?_assert(float_to_decimal(1.0e308) =:= "1.0e308")},
|
{"0.0001", ?_assert(nice_decimal(0.0001) =:= "1.0e-4")},
|
||||||
{"min normalized float", ?_assert(float_to_decimal(math:pow(2, -1022)) =:= "2.2250738585072014e-308")},
|
{"0.00000001", ?_assert(nice_decimal(0.00000001) =:= "1.0e-8")},
|
||||||
{"max normalized float", ?_assert(float_to_decimal((2 - math:pow(2, -52)) * math:pow(2, 1023)) =:= "1.7976931348623157e308")},
|
{"1.0e-323", ?_assert(nice_decimal(1.0e-323) =:= "1.0e-323")},
|
||||||
{"min denormalized float", ?_assert(float_to_decimal(math:pow(2, -1074)) =:= "5.0e-324")},
|
{"1.0e308", ?_assert(nice_decimal(1.0e308) =:= "1.0e308")},
|
||||||
{"max denormalized float", ?_assert(float_to_decimal((1 - math:pow(2, -52)) * math:pow(2, -1022)) =:= "2.225073858507201e-308")}
|
{"min normalized float",
|
||||||
|
?_assert(
|
||||||
|
nice_decimal(math:pow(2, -1022)) =:= "2.2250738585072014e-308"
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"max normalized float",
|
||||||
|
?_assert(
|
||||||
|
nice_decimal((2 - math:pow(2, -52)) * math:pow(2, 1023))
|
||||||
|
=:= "1.7976931348623157e308"
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"min denormalized float",
|
||||||
|
?_assert(nice_decimal(math:pow(2, -1074)) =:= "5.0e-324")
|
||||||
|
},
|
||||||
|
{"max denormalized float",
|
||||||
|
?_assert(
|
||||||
|
nice_decimal((1 - math:pow(2, -52)) * math:pow(2, -1022))
|
||||||
|
=:= "2.225073858507201e-308"
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
-endif.
|
-endif.
|
|
@ -79,7 +79,11 @@ format_something({event, start_object, Next}, Opts, Level) ->
|
||||||
{Continue, [?start_object, ?end_object]}
|
{Continue, [?start_object, ?end_object]}
|
||||||
; Event ->
|
; Event ->
|
||||||
{Continue, Object} = format_object(Event, [], Opts, Level + 1),
|
{Continue, Object} = format_object(Event, [], Opts, Level + 1),
|
||||||
{Continue, [?start_object, Object, indent(Opts, Level), ?end_object]}
|
{Continue, [?start_object,
|
||||||
|
Object,
|
||||||
|
indent(Opts, Level),
|
||||||
|
?end_object
|
||||||
|
]}
|
||||||
end;
|
end;
|
||||||
format_something({event, start_array, Next}, Opts, Level) ->
|
format_something({event, start_array, Next}, Opts, Level) ->
|
||||||
case Next() of
|
case Next() of
|
||||||
|
@ -99,10 +103,24 @@ format_object({event, {key, Key}, Next}, Acc, Opts, Level) ->
|
||||||
{Continue, Value} = format_something(Next(), Opts, Level),
|
{Continue, Value} = format_something(Next(), Opts, Level),
|
||||||
case Continue() of
|
case Continue() of
|
||||||
{event, end_object, NextNext} ->
|
{event, end_object, NextNext} ->
|
||||||
{NextNext, [Acc, indent(Opts, Level), encode(string, Key), ?colon, space(Opts), Value]}
|
{NextNext, [Acc,
|
||||||
|
indent(Opts, Level),
|
||||||
|
encode(string, Key),
|
||||||
|
?colon,
|
||||||
|
space(Opts),
|
||||||
|
Value
|
||||||
|
]}
|
||||||
; Else ->
|
; Else ->
|
||||||
format_object(Else,
|
format_object(Else,
|
||||||
[Acc, indent(Opts, Level), encode(string, Key), ?colon, space(Opts), Value, ?comma, space(Opts)],
|
[Acc,
|
||||||
|
indent(Opts, Level),
|
||||||
|
encode(string, Key),
|
||||||
|
?colon,
|
||||||
|
space(Opts),
|
||||||
|
Value,
|
||||||
|
?comma,
|
||||||
|
space(Opts)
|
||||||
|
],
|
||||||
Opts,
|
Opts,
|
||||||
Level
|
Level
|
||||||
)
|
)
|
||||||
|
@ -117,14 +135,24 @@ format_array(Event, Acc, Opts, Level) ->
|
||||||
{event, end_array, NextNext} ->
|
{event, end_array, NextNext} ->
|
||||||
{NextNext, [Acc, indent(Opts, Level), Value]}
|
{NextNext, [Acc, indent(Opts, Level), Value]}
|
||||||
; Else ->
|
; Else ->
|
||||||
format_array(Else, [Acc, indent(Opts, Level), Value, ?comma, space(Opts)], Opts, Level)
|
format_array(Else,
|
||||||
|
[Acc,
|
||||||
|
indent(Opts, Level),
|
||||||
|
Value,
|
||||||
|
?comma,
|
||||||
|
space(Opts)
|
||||||
|
],
|
||||||
|
Opts,
|
||||||
|
Level
|
||||||
|
)
|
||||||
end.
|
end.
|
||||||
|
|
||||||
|
|
||||||
encode(Acc, Opts) when is_list(Acc) ->
|
encode(Acc, Opts) when is_list(Acc) ->
|
||||||
case Opts#format_opts.output_encoding of
|
case Opts#format_opts.output_encoding of
|
||||||
iolist -> Acc
|
iolist -> Acc
|
||||||
; UTF when ?is_utf_encoding(UTF) -> unicode:characters_to_binary(Acc, utf8, UTF)
|
; UTF when ?is_utf_encoding(UTF) ->
|
||||||
|
unicode:characters_to_binary(Acc, utf8, UTF)
|
||||||
; _ -> erlang:throw(badarg)
|
; _ -> erlang:throw(badarg)
|
||||||
end;
|
end;
|
||||||
encode(string, String) ->
|
encode(string, String) ->
|
||||||
|
@ -162,17 +190,58 @@ space(Opts) ->
|
||||||
|
|
||||||
minify_test_() ->
|
minify_test_() ->
|
||||||
[
|
[
|
||||||
{"minify object", ?_assert(format(<<" { \"key\" :\n\t \"value\"\r\r\r\n } ">>, []) =:= <<"{\"key\":\"value\"}">>)},
|
{"minify object",
|
||||||
{"minify array", ?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, []) =:= <<"[true,false,null]">>)}
|
?_assert(format(<<" { \"key\" :\n\t \"value\"\r\r\r\n } ">>,
|
||||||
|
[]
|
||||||
|
) =:= <<"{\"key\":\"value\"}">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"minify array",
|
||||||
|
?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>,
|
||||||
|
[]
|
||||||
|
) =:= <<"[true,false,null]">>
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
opts_test_() ->
|
opts_test_() ->
|
||||||
[
|
[
|
||||||
{"unspecified indent/space", ?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>, [space, indent]) =:= <<"[\n true, \n false, \n null\n]">>)},
|
{"unspecified indent/space",
|
||||||
{"specific indent/space", ?_assert(format(<<"\n{\n\"key\" : [],\n\"another key\" : true\n}\n">>, [{space, 2}, {indent, 4}]) =:= <<"{\n \"key\": [], \n \"another key\": true\n}">>)},
|
?_assert(format(<<" [\n\ttrue,\n\tfalse,\n\tnull\n] ">>,
|
||||||
{"nested structures", ?_assert(format(<<"[{\"key\":\"value\", \"another key\": \"another value\"}, [[true, false, null]]]">>, [{space, 2}, {indent, 2}]) =:= <<"[\n {\n \"key\": \"value\", \n \"another key\": \"another value\"\n }, \n [\n [\n true, \n false, \n null\n ]\n ]\n]">>)},
|
[space, indent]
|
||||||
{"just spaces", ?_assert(format(<<"[1,2,3]">>, [{space, 2}]) =:= <<"[1, 2, 3]">>)},
|
) =:= <<"[\n true, \n false, \n null\n]">>
|
||||||
{"just indent", ?_assert(format(<<"[1.0, 2.0, 3.0]">>, [{indent, 2}]) =:= <<"[\n 1.0,\n 2.0,\n 3.0\n]">>)}
|
)
|
||||||
|
},
|
||||||
|
{"specific indent/space",
|
||||||
|
?_assert(format(
|
||||||
|
<<"\n{\n\"key\" : [],\n\"another key\" : true\n}\n">>,
|
||||||
|
[{space, 2}, {indent, 3}]
|
||||||
|
) =:= <<"{\n \"key\": [], \n \"another key\": true\n}">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"nested structures",
|
||||||
|
?_assert(format(
|
||||||
|
<<"[{\"key\":\"value\",
|
||||||
|
\"another key\": \"another value\"
|
||||||
|
},
|
||||||
|
[[true, false, null]]
|
||||||
|
]">>,
|
||||||
|
[{space, 2}, {indent, 2}]
|
||||||
|
) =:= <<"[\n {\n \"key\": \"value\", \n \"another key\": \"another value\"\n }, \n [\n [\n true, \n false, \n null\n ]\n ]\n]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"just spaces",
|
||||||
|
?_assert(format(<<"[1,2,3]">>,
|
||||||
|
[{space, 2}]
|
||||||
|
) =:= <<"[1, 2, 3]">>
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"just indent",
|
||||||
|
?_assert(format(<<"[1.0, 2.0, 3.0]">>,
|
||||||
|
[{indent, 2}]
|
||||||
|
) =:= <<"[\n 1.0,\n 2.0,\n 3.0\n]">>
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
-endif.
|
-endif.
|
|
@ -68,8 +68,8 @@ collect({event, start_object, Next}, Keys) -> collect(Next(), [[]|Keys]);
|
||||||
collect({event, end_object, Next}, [_|Keys]) -> collect(Next(), [Keys]);
|
collect({event, end_object, Next}, [_|Keys]) -> collect(Next(), [Keys]);
|
||||||
|
|
||||||
|
|
||||||
%% check to see if key has already been encountered, if not add it to the key accumulator
|
%% check to see if key has already been encountered, if not add it to the key
|
||||||
%% and continue, else return false
|
%% accumulator and continue, else return false
|
||||||
collect({event, {key, Key}, Next}, [Current|Keys]) ->
|
collect({event, {key, Key}, Next}, [Current|Keys]) ->
|
||||||
case lists:member(Key, Current) of
|
case lists:member(Key, Current) of
|
||||||
true -> false
|
true -> false
|
||||||
|
@ -81,7 +81,8 @@ collect({event, _, Next}, Keys) ->
|
||||||
collect(Next(), Keys);
|
collect(Next(), Keys);
|
||||||
|
|
||||||
|
|
||||||
%% needed to parse numbers that don't have trailing whitespace in less strict mode
|
%% needed to parse numbers that don't have trailing whitespace in less strict
|
||||||
|
%% mode
|
||||||
collect({incomplete, More}, Keys) ->
|
collect({incomplete, More}, Keys) ->
|
||||||
collect(More(end_stream), Keys);
|
collect(More(end_stream), Keys);
|
||||||
|
|
||||||
|
@ -98,32 +99,85 @@ true_test_() ->
|
||||||
[
|
[
|
||||||
{"empty object", ?_assert(is_json(<<"{}">>, []) =:= true)},
|
{"empty object", ?_assert(is_json(<<"{}">>, []) =:= true)},
|
||||||
{"empty array", ?_assert(is_json(<<"[]">>, []) =:= true)},
|
{"empty array", ?_assert(is_json(<<"[]">>, []) =:= true)},
|
||||||
{"whitespace", ?_assert(is_json(<<" \n \t \r [true] \t \n\r ">>, []) =:= true)},
|
{"whitespace",
|
||||||
{"nested terms", ?_assert(is_json(<<"[ { \"key\": [ {}, {}, {} ], \"more key\": [{}] }, {}, [[[]]] ]">>, []) =:= true)},
|
?_assert(is_json(<<" \n \t \r [true] \t \n\r ">>,
|
||||||
{"numbers", ?_assert(is_json(<<"[ -1.0, -1, -0, 0, 1e-1, 1, 1.0, 1e1 ]">>, []) =:= true)},
|
[]
|
||||||
{"strings", ?_assert(is_json(<<"[ \"a\", \"string\", \"in\", \"multiple\", \"acts\" ]">>, []) =:= true)},
|
) =:= true
|
||||||
{"literals", ?_assert(is_json(<<"[ true, false, null ]">>, []) =:= true)},
|
)
|
||||||
{"nested objects", ?_assert(is_json(<<"{\"key\": { \"key\": true}}">>, []) =:= true)}
|
},
|
||||||
|
{"nested terms",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"[{ \"x\": [ {}, {}, {} ], \"y\": [{}] }, {}, [[[]]]]">>,
|
||||||
|
[]
|
||||||
|
) =:= true
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"numbers",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"[ -1.0, -1, -0, 0, 1e-1, 1, 1.0, 1e1 ]">>,
|
||||||
|
[]
|
||||||
|
) =:= true
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"strings",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"[ \"a\", \"string\", \"in\", \"multiple\", \"acts\" ]">>,
|
||||||
|
[]
|
||||||
|
) =:= true
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"literals",
|
||||||
|
?_assert(is_json(<<"[ true, false, null ]">>, []) =:= true)
|
||||||
|
},
|
||||||
|
{"nested objects",
|
||||||
|
?_assert(is_json(<<"{\"key\": { \"key\": true}}">>, []) =:= true)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
false_test_() ->
|
false_test_() ->
|
||||||
[
|
[
|
||||||
{"naked true", ?_assert(is_json(<<"true">>, []) =:= false)},
|
{"naked true", ?_assert(is_json(<<"true">>, []) =:= false)},
|
||||||
{"naked number", ?_assert(is_json(<<"1">>, []) =:= false)},
|
{"naked number", ?_assert(is_json(<<"1">>, []) =:= false)},
|
||||||
{"naked string", ?_assert(is_json(<<"\"i am not json\"">>, []) =:= false)},
|
{"naked string",
|
||||||
|
?_assert(is_json(<<"\"i am not json\"">>, []) =:= false)
|
||||||
|
},
|
||||||
{"unbalanced list", ?_assert(is_json(<<"[[[]]">>, []) =:= false)},
|
{"unbalanced list", ?_assert(is_json(<<"[[[]]">>, []) =:= false)},
|
||||||
{"trailing comma", ?_assert(is_json(<<"[ true, false, null, ]">>, []) =:= false)},
|
{"trailing comma",
|
||||||
|
?_assert(is_json(<<"[ true, false, null, ]">>, []) =:= false)
|
||||||
|
},
|
||||||
{"unquoted key", ?_assert(is_json(<<"{ key: false }">>, []) =:= false)},
|
{"unquoted key", ?_assert(is_json(<<"{ key: false }">>, []) =:= false)},
|
||||||
{"repeated key", ?_assert(is_json(<<"{\"key\": true, \"key\": true}">>, []) =:= false)},
|
{"repeated key",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"{\"key\": true, \"key\": true}">>,
|
||||||
|
[]
|
||||||
|
) =:= false
|
||||||
|
)
|
||||||
|
},
|
||||||
{"comments", ?_assert(is_json(<<"[ /* a comment */ ]">>, []) =:= false)}
|
{"comments", ?_assert(is_json(<<"[ /* a comment */ ]">>, []) =:= false)}
|
||||||
].
|
].
|
||||||
|
|
||||||
less_strict_test_() ->
|
less_strict_test_() ->
|
||||||
[
|
[
|
||||||
{"naked true", ?_assert(is_json(<<"true">>, [{strict, false}]) =:= true)},
|
{"naked true",
|
||||||
{"naked number", ?_assert(is_json(<<"1">>, [{strict, false}]) =:= true)},
|
?_assert(is_json(<<"true">>, [{strict, false}]) =:= true)
|
||||||
{"naked string", ?_assert(is_json(<<"\"i am not json\"">>, [{strict, false}]) =:= true)},
|
},
|
||||||
{"comments", ?_assert(is_json(<<"[ /* a comment */ ]">>, [{comments, true}]) =:= true)}
|
{"naked number",
|
||||||
|
?_assert(is_json(<<"1">>, [{strict, false}]) =:= true)
|
||||||
|
},
|
||||||
|
{"naked string",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"\"i am not json\"">>,
|
||||||
|
[{strict, false}]
|
||||||
|
) =:= true
|
||||||
|
)
|
||||||
|
},
|
||||||
|
{"comments",
|
||||||
|
?_assert(is_json(
|
||||||
|
<<"[ /* a comment */ ]">>,
|
||||||
|
[{comments, true}]
|
||||||
|
) =:= true
|
||||||
|
)
|
||||||
|
}
|
||||||
].
|
].
|
||||||
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue