0
Fork 0
mirror of https://github.com/ninenines/cowboy.git synced 2025-07-14 20:30:23 +00:00

Use the new chunked and identity transfer decoding code from cowlib

This commit is contained in:
Loïc Hoguin 2014-03-24 14:14:43 +01:00
parent dbf7b1c5e5
commit d4ce3c638d
2 changed files with 15 additions and 80 deletions

View file

@@ -40,8 +40,6 @@
-export([parameterized_tokens/1]).
%% Decoding.
-export([te_chunked/2]).
-export([te_identity/2]).
-export([ce_identity/1]).
%% Parsing.
@@ -872,74 +870,8 @@ parameterized_tokens_param(Data, Fun) ->
%% Decoding.
%% @doc Decode a stream of chunks.
%%
%% TransferState is {ChunkRem, Streamed}:
%% - ChunkRem: bytes still expected for the current chunk, INCLUDING its
%%   trailing <<"\r\n">> (so 0 means "expecting a chunk-size line",
%%   1 means "only the final <<"\n">> of the CRLF is missing",
%%   2 means "the whole trailing <<"\r\n">> is missing").
%% - Streamed: total body bytes decoded so far (used for the content-length
%%   reported in the final {done, ...} tuple).
-spec te_chunked(Bin, TransferState)
	-> more | {more, non_neg_integer(), Bin, TransferState}
	| {ok, Bin, Bin, TransferState}
	| {done, non_neg_integer(), Bin} | {error, badarg}
	when Bin::binary(), TransferState::{non_neg_integer(), non_neg_integer()}.
%% Last chunk: a literal <<"0\r\n\r\n">> while expecting a size line.
%% Decoding is complete; Rest is whatever followed the terminating CRLF.
te_chunked(<< "0\r\n\r\n", Rest/binary >>, {0, Streamed}) ->
	{done, Streamed, Rest};
%% Expecting a chunk-size line (ChunkRem =:= 0).
te_chunked(Data, {0, Streamed}) ->
	%% @todo We are expecting an hex size, not a general token.
	token(Data,
		fun (<< "\r\n", Rest/binary >>, BinLen) ->
			%% Size line is complete: parse the hex length.
			case list_to_integer(binary_to_list(BinLen), 16) of
				%% Final chunk is parsed in one go above. Rest would be
				%% <<"\r\n">> if complete; fewer than 2 bytes means the
				%% terminating CRLF hasn't arrived yet.
				0 when byte_size(Rest) < 2 ->
					more;
				%% Normal chunk. Add 2 to Len for trailing <<"\r\n">>. Note
				%% that repeated <<"-2\r\n">> would be streamed, and
				%% accumulated, until out of memory if Len could be -2.
				Len when Len > 0 ->
					te_chunked(Rest, {Len + 2, Streamed})
			end;
		%% Chunk size shouldn't take too many bytes,
		%% don't try to stream forever.
		(Rest, _) when byte_size(Rest) < 16 ->
			more;
		(_, _) ->
			{error, badarg}
		end);
%% <<"\n">> from trailing <<"\r\n">>: the <<"\r">> was consumed with the
%% previous buffer (ChunkRem =:= 1); this byte closes the chunk.
te_chunked(<< "\n", Rest/binary>>, {1, Streamed}) ->
	{ok, <<>>, Rest, {0, Streamed}};
%% Nothing buffered while still owing the final <<"\n">>.
te_chunked(<<>>, State={1, _Streamed}) ->
	{more, 1, <<>>, State};
%% Remainder of chunk (if any) and as much of trailing <<"\r\n">> as possible.
%% Guard: Data holds at least the remaining payload (ChunkRem - 2 bytes).
te_chunked(Data, {ChunkRem, Streamed}) when byte_size(Data) >= ChunkRem - 2 ->
	ChunkSize = ChunkRem - 2,
	Streamed2 = Streamed + ChunkSize,
	case Data of
		%% Payload plus full CRLF: chunk complete.
		<< Chunk:ChunkSize/binary, "\r\n", Rest/binary >> ->
			{ok, Chunk, Rest, {0, Streamed2}};
		%% Payload plus only <<"\r">>: still owe 1 byte (the <<"\n">>).
		<< Chunk:ChunkSize/binary, "\r" >> ->
			{more, 1, Chunk, {1, Streamed2}};
		%% Payload only: still owe the 2-byte CRLF.
		<< Chunk:ChunkSize/binary >> ->
			{more, 2, Chunk, {2, Streamed2}}
	end;
%% Incomplete chunk: stream everything we have and keep counting.
te_chunked(Data, {ChunkRem, Streamed}) ->
	ChunkRem2 = ChunkRem - byte_size(Data),
	Streamed2 = Streamed + byte_size(Data),
	{more, ChunkRem2, Data, {ChunkRem2, Streamed2}}.
%% @doc Decode an identity (as-is) stream.
%%
%% TransferState is {Streamed, Total}: bytes already delivered and the
%% expected total body length. Returns {more, ...} with the number of
%% bytes still missing while the body is incomplete, and {done, ...}
%% with the decoded bytes and leftover buffer once Total is reached.
-spec te_identity(Bin, TransferState)
	-> {more, non_neg_integer(), Bin, TransferState}
	| {done, Bin, non_neg_integer(), Bin}
	when Bin::binary(), TransferState::{non_neg_integer(), non_neg_integer()}.
%% Body still incomplete: everything in Data belongs to it.
te_identity(Data, {Seen, Total}) when byte_size(Data) + Seen < Total ->
	Seen2 = Seen + byte_size(Data),
	{more, Total - Seen2, Data, {Seen2, Total}};
%% Data contains at least the rest of the body; split off any excess.
te_identity(Data, {Seen, Total}) ->
	Wanted = Total - Seen,
	<< Body:Wanted/binary, Excess/binary >> = Data,
	{done, Body, Total, Excess}.
%% @doc Decode an identity content: the body is passed through untouched.
%% @todo Move this to cowlib too I suppose. :-)
-spec ce_identity(binary()) -> {ok, binary()}.
ce_identity(Body) ->
	{ok, Body}.

View file

@@ -632,7 +632,7 @@ stream_body(MaxLength, Req=#http_req{body_state=waiting, version=Version,
{ok, [<<"chunked">>], Req2} ->
stream_body(MaxLength, Req2#http_req{body_state=
{stream, 0,
fun cowboy_http:te_chunked/2, {0, 0},
fun cow_http_te:stream_chunked/2, {0, 0},
fun cowboy_http:ce_identity/1}});
{ok, [<<"identity">>], Req2} ->
{Length, Req3} = body_length(Req2),
@@ -642,7 +642,7 @@ stream_body(MaxLength, Req=#http_req{body_state=waiting, version=Version,
Length ->
stream_body(MaxLength, Req3#http_req{body_state=
{stream, Length,
fun cowboy_http:te_identity/2, {0, Length},
fun cow_http_te:stream_identity/2, {0, Length},
fun cowboy_http:ce_identity/1}})
end
end;
@@ -666,31 +666,34 @@ stream_body_recv(MaxLength, Req=#http_req{
{error, Reason} -> {error, Reason}
end.
%% @todo Handle chunked after-the-facts headers.
%% @todo Depending on the length returned we might want to 0 or +5 it.
%%
%% NOTE(review): this span is a unified-diff overlay rendered without
%% +/- markers — the removed (cowboy_http:te_chunked/2-era) and added
%% (cow_http_te:stream_chunked/2-era) case clauses appear side by side,
%% so the text below is NOT valid Erlang as written. Recover the pre-
%% or post-commit version before editing; comments mark the collisions.
%%
%% Applies the stored transfer-decode fun to Data and, on success, feeds
%% the decoded bytes through the content-decode fun, updating the
%% body_state (and leftover buffer) carried in the request record.
-spec transfer_decode(binary(), Req)
-> {ok, binary(), Req} | {error, atom()} when Req::req().
transfer_decode(Data, Req=#http_req{body_state={stream, _,
TransferDecode, TransferState, ContentDecode}}) ->
case TransferDecode(Data, TransferState) of
{ok, Data2, Rest, TransferState2} ->
content_decode(ContentDecode, Data2,
Req#http_req{buffer=Rest, body_state={stream, 0,
TransferDecode, TransferState2, ContentDecode}});
%% @todo {header(s) for chunked
more ->
stream_body_recv(0, Req#http_req{buffer=Data, body_state={stream,
0, TransferDecode, TransferState, ContentDecode}});
%% NOTE(review): old (4-tuple) clause head, immediately followed by the
%% new (3-tuple) head below — only one of the two belongs in each version.
{more, Length, Data2, TransferState2} ->
{more, Data2, TransferState2} ->
content_decode(ContentDecode, Data2,
Req#http_req{body_state={stream, 0,
TransferDecode, TransferState2, ContentDecode}});
{more, Data2, Length, TransferState2} ->
content_decode(ContentDecode, Data2,
Req#http_req{body_state={stream, Length,
TransferDecode, TransferState2, ContentDecode}});
{more, Data2, Length, Rest, TransferState2} ->
content_decode(ContentDecode, Data2,
Req#http_req{buffer=Rest, body_state={stream, Length,
TransferDecode, TransferState2, ContentDecode}});
{done, Length, Rest} ->
Req2 = transfer_decode_done(Length, Rest, Req),
{done, Req2};
{done, Data2, Length, Rest} ->
Req2 = transfer_decode_done(Length, Rest, Req),
%% NOTE(review): the next three lines interleave the removed
%% {error, Reason} clause with the added clause tail — another
%% diff-overlay collision, not sequential code.
content_decode(ContentDecode, Data2, Req2);
{error, Reason} ->
{error, Reason}
content_decode(ContentDecode, Data2, Req2)
end.
-spec transfer_decode_done(non_neg_integer(), binary(), Req)