Add experimental cowboy_compress_h stream handler
Currently marked as experimental because it cannot be configured yet (it can only be enabled or disabled) and because it is not documented yet.
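
For context, the handler is enabled by listing it in front of the default cowboy_stream_h in the stream_handlers protocol option, exactly as the updated cowboy_test helpers below do. A minimal sketch of such a protocol options map follows; the "/" route and the my_handler module are placeholders, not part of this commit, and the map is what would be passed to the listener start function (cowboy:start_clear or cowboy:start_tls):

%% Sketch only: protocol options that enable the compress stream handler.
%% The "/" route and my_handler are placeholder values, not part of this commit.
ProtoOpts = #{
	env => #{dispatch => cowboy_router:compile([{'_', [{"/", my_handler, []}]}])},
	stream_handlers => [cowboy_compress_h, cowboy_stream_h]
}.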
commit 3e05ab8f82
parent 3b91523a3c

10 changed files with 405 additions and 43 deletions
@@ -1,7 +1,7 @@
 {application, 'cowboy', [
 	{description, "Small, fast, modern HTTP server."},
 	{vsn, "2.0.0-pre.4"},
-	{modules, ['cowboy','cowboy_app','cowboy_bstr','cowboy_clear','cowboy_clock','cowboy_constraints','cowboy_handler','cowboy_http','cowboy_http2','cowboy_loop','cowboy_middleware','cowboy_req','cowboy_rest','cowboy_router','cowboy_static','cowboy_stream','cowboy_stream_h','cowboy_sub_protocol','cowboy_sup','cowboy_tls','cowboy_websocket']},
+	{modules, ['cowboy','cowboy_app','cowboy_bstr','cowboy_clear','cowboy_clock','cowboy_compress_h','cowboy_constraints','cowboy_handler','cowboy_http','cowboy_http2','cowboy_loop','cowboy_middleware','cowboy_req','cowboy_rest','cowboy_router','cowboy_static','cowboy_stream','cowboy_stream_h','cowboy_sub_protocol','cowboy_sup','cowboy_tls','cowboy_websocket']},
 	{registered, [cowboy_sup,cowboy_clock]},
 	{applications, [kernel,stdlib,crypto,cowlib,ranch]},
 	{mod, {cowboy_app, []}},
src/cowboy_compress_h.erl (new file, 167 lines)
%% Copyright (c) 2017, Loïc Hoguin <essen@ninenines.eu>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

-module(cowboy_compress_h).
-behavior(cowboy_stream).

-export([init/3]).
-export([data/4]).
-export([info/3]).
-export([terminate/3]).

-record(state, {
	next :: any(),
	compress = undefined :: undefined | gzip,
	deflate = undefined :: undefined | zlib:zstream()
}).

-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts())
	-> {cowboy_stream:commands(), #state{}}.
init(StreamID, Req, Opts) ->
	State0 = check_req(Req),
	{Commands0, Next} = cowboy_stream:init(StreamID, Req, Opts),
	fold(Commands0, State0#state{next=Next}).

-spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State)
	-> {cowboy_stream:commands(), State} when State::#state{}.
data(StreamID, IsFin, Data, State0=#state{next=Next0}) ->
	{Commands0, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0),
	fold(Commands0, State0#state{next=Next}).

-spec info(cowboy_stream:streamid(), any(), State)
	-> {cowboy_stream:commands(), State} when State::#state{}.
info(StreamID, Info, State0=#state{next=Next0}) ->
	{Commands0, Next} = cowboy_stream:info(StreamID, Info, Next0),
	fold(Commands0, State0#state{next=Next}).

-spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> any().
terminate(StreamID, Reason, #state{next=Next, deflate=Z}) ->
	%% Clean the zlib:stream() in case something went wrong.
	%% In the normal scenario the stream is already closed.
	case Z of
		undefined -> ok;
		_ -> zlib:close(Z)
	end,
	cowboy_stream:terminate(StreamID, Reason, Next).

%% Internal.

%% Check if the client supports decoding of gzip responses.
check_req(Req) ->
	case cowboy_req:parse_header(<<"accept-encoding">>, Req) of
		%% Client doesn't support any compression algorithm.
		undefined ->
			#state{compress=undefined};
		Encodings ->
			%% We only support gzip so look for it specifically.
			%% @todo A recipient SHOULD consider "x-gzip" to be
			%% equivalent to "gzip". (RFC7230 4.2.3)
			case [E || E={<<"gzip">>, Q} <- Encodings, Q =/= 0] of
				[] ->
					#state{compress=undefined};
				_ ->
					#state{compress=gzip}
			end
	end.

%% Do not compress responses that contain the content-encoding header.
check_resp_headers(#{<<"content-encoding">> := _}, State) ->
	State#state{compress=undefined};
check_resp_headers(_, State) ->
	State.

fold(Commands, State=#state{compress=undefined}) ->
	{Commands, State};
fold(Commands, State) ->
	fold(Commands, State, []).

fold([], State, Acc) ->
	{lists:reverse(Acc), State};
%% We do not compress sendfile bodies.
fold([Response={response, _, _, {sendfile, _, _, _}}|Tail], State, Acc) ->
	fold(Tail, State, [Response|Acc]);
%% We compress full responses directly, unless they are lower than
%% 300 bytes or we find we are not able to by looking at the headers.
%% @todo It might be good to allow this size to be configured?
fold([Response0={response, _, Headers, Body}|Tail], State0, Acc) ->
	case check_resp_headers(Headers, State0) of
		State=#state{compress=undefined} ->
			fold(Tail, State, [Response0|Acc]);
		State1 ->
			BodyLength = iolist_size(Body),
			if
				BodyLength =< 300 ->
					fold(Tail, State1, [Response0|Acc]);
				true ->
					{Response, State} = gzip_response(Response0, State1),
					fold(Tail, State, [Response|Acc])
			end
	end;
%% Check headers and initiate compression...
fold([Response0={headers, _, Headers}|Tail], State0, Acc) ->
	case check_resp_headers(Headers, State0) of
		State=#state{compress=undefined} ->
			fold(Tail, State, [Response0|Acc]);
		State1 ->
			{Response, State} = gzip_headers(Response0, State1),
			fold(Tail, State, [Response|Acc])
	end;
%% then compress each data commands individually.
fold([Data0={data, _, _}|Tail], State0=#state{compress=gzip}, Acc) ->
	{Data, State} = gzip_data(Data0, State0),
	fold(Tail, State, [Data|Acc]);
%% Otherwise, we either have an unrelated command, or a data command
%% with compression disabled.
fold([Command|Tail], State, Acc) ->
	fold(Tail, State, [Command|Acc]).

gzip_response({response, Status, Headers, Body}, State) ->
	%% We can't call zlib:gzip/1 because it does an
	%% iolist_to_binary(GzBody) at the end to return
	%% a binary(). Therefore the code here is largely
	%% a duplicate of the code of that function.
	Z = zlib:open(),
	GzBody = try
		%% 31 = 16+?MAX_WBITS from zlib.erl
		%% @todo It might be good to allow them to be configured?
		zlib:deflateInit(Z, default, deflated, 31, 8, default),
		Gz = zlib:deflate(Z, Body, finish),
		zlib:deflateEnd(Z),
		Gz
	after
		zlib:close(Z)
	end,
	{{response, Status, Headers#{
		<<"content-length">> => integer_to_binary(iolist_size(GzBody)),
		<<"content-encoding">> => <<"gzip">>
	}, GzBody}, State}.

gzip_headers({headers, Status, Headers0}, State) ->
	Z = zlib:open(),
	%% We use the same arguments as when compressing the body fully.
	%% @todo It might be good to allow them to be configured?
	zlib:deflateInit(Z, default, deflated, 31, 8, default),
	Headers = maps:remove(<<"content-length">>, Headers0),
	{{headers, Status, Headers#{
		<<"content-encoding">> => <<"gzip">>
	}}, State#state{deflate=Z}}.

gzip_data({data, nofin, Data0}, State=#state{deflate=Z}) ->
	Data = zlib:deflate(Z, Data0),
	{{data, nofin, Data}, State};
gzip_data({data, fin, Data0}, State=#state{deflate=Z}) ->
	Data = zlib:deflate(Z, Data0, finish),
	zlib:deflateEnd(Z),
	zlib:close(Z),
	{{data, fin, Data}, State#state{deflate=undefined}}.
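A side note on the deflate arguments used by gzip_response/2 and gzip_headers/2 above: window bits 31 (16 + MAX_WBITS) selects the gzip wrapper rather than raw deflate or zlib framing. The following standalone sketch, runnable in an Erlang shell and not part of the commit, exercises the same zlib call sequence on a small binary:

%% Sketch only: the zlib sequence mirrored from gzip_response/2 above.
Z = zlib:open(),
ok = zlib:deflateInit(Z, default, deflated, 31, 8, default),
Gz = iolist_to_binary(zlib:deflate(Z, <<"hello gzip">>, finish)),
ok = zlib:deflateEnd(Z),
ok = zlib:close(Z),
<<"hello gzip">> = zlib:gunzip(Gz).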
test/compress_SUITE.erl (new file, 129 lines)
%% Copyright (c) 2017, Loïc Hoguin <essen@ninenines.eu>
%%
%% Permission to use, copy, modify, and/or distribute this software for any
%% purpose with or without fee is hereby granted, provided that the above
%% copyright notice and this permission notice appear in all copies.
%%
%% THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
%% WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
%% MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
%% ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
%% WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
%% ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
%% OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

-module(compress_SUITE).
-compile(export_all).

-import(ct_helper, [config/2]).
-import(ct_helper, [doc/1]).
-import(cowboy_test, [gun_open/1]).

%% ct.

all() ->
	[
		{group, http_compress},
		{group, https_compress},
		{group, h2_compress},
		{group, h2c_compress}
	].

groups() ->
	cowboy_test:common_groups(ct_helper:all(?MODULE)).

init_per_group(Name, Config) ->
	cowboy_test:init_common_groups(Name, Config, ?MODULE).

end_per_group(Name, _) ->
	cowboy:stop_listener(Name).

%% Routes.

init_dispatch(Config) ->
	cowboy_router:compile([{"[...]", [
		{"/reply/:what", compress_h, reply},
		{"/stream_reply/:what", compress_h, stream_reply}
	]}]).

%% Internal.

do_get(Path, ReqHeaders, Config) ->
	ConnPid = gun_open(Config),
	Ref = gun:get(ConnPid, Path, ReqHeaders),
	{response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref),
	{ok, Body} = case IsFin of
		nofin -> gun:await_body(ConnPid, Ref);
		fin -> {ok, <<>>}
	end,
	gun:close(ConnPid),
	{Status, RespHeaders, Body}.

%% Tests.

gzip_accept_encoding_missing(Config) ->
	doc("Don't send accept-encoding; get an uncompressed response."),
	{200, Headers, _} = do_get("/reply/large",
		[], Config),
	false = lists:keyfind(<<"content-encoding">>, 1, Headers),
	{_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers),
	ok.

gzip_accept_encoding_no_gzip(Config) ->
	doc("Send accept-encoding: compress (unsupported by Cowboy); get an uncompressed response."),
	{200, Headers, _} = do_get("/reply/large",
		[{<<"accept-encoding">>, <<"compress">>}], Config),
	false = lists:keyfind(<<"content-encoding">>, 1, Headers),
	{_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers),
	ok.

gzip_reply_content_encoding(Config) ->
	doc("Reply with content-encoding header; get an uncompressed response."),
	{200, Headers, _} = do_get("/reply/content-encoding",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	%% We set the content-encoding to compress; without actually compressing.
	{_, <<"compress">>} = lists:keyfind(<<"content-encoding">>, 1, Headers),
	{_, <<"100000">>} = lists:keyfind(<<"content-length">>, 1, Headers),
	ok.

gzip_reply_large_body(Config) ->
	doc("Reply a large body; get a gzipped response."),
	{200, Headers, GzBody} = do_get("/reply/large",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	{_, <<"gzip">>} = lists:keyfind(<<"content-encoding">>, 1, Headers),
	{_, Length} = lists:keyfind(<<"content-length">>, 1, Headers),
	ct:log("Original length: 100000; compressed: ~s.", [Length]),
	_ = zlib:gunzip(GzBody),
	ok.

gzip_reply_sendfile(Config) ->
	doc("Reply using sendfile; get an uncompressed response."),
	{200, Headers, Body} = do_get("/reply/sendfile",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	false = lists:keyfind(<<"content-encoding">>, 1, Headers),
	ct:log("Body received:~n~p~n", [Body]),
	ok.

gzip_reply_small_body(Config) ->
	doc("Reply a small body; get an uncompressed response."),
	{200, Headers, _} = do_get("/reply/small",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	false = lists:keyfind(<<"content-encoding">>, 1, Headers),
	{_, <<"100">>} = lists:keyfind(<<"content-length">>, 1, Headers),
	ok.

gzip_stream_reply(Config) ->
	doc("Stream reply; get a gzipped response."),
	{200, Headers, GzBody} = do_get("/stream_reply/large",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	{_, <<"gzip">>} = lists:keyfind(<<"content-encoding">>, 1, Headers),
	_ = zlib:gunzip(GzBody),
	ok.

gzip_stream_reply_content_encoding(Config) ->
	doc("Stream reply with content-encoding header; get an uncompressed response."),
	{200, Headers, Body} = do_get("/stream_reply/content-encoding",
		[{<<"accept-encoding">>, <<"gzip">>}], Config),
	{_, <<"compress">>} = lists:keyfind(<<"content-encoding">>, 1, Headers),
	100000 = iolist_size(Body),
	ok.
@@ -43,10 +43,11 @@ common_all() ->
 		{group, http},
 		{group, https},
 		{group, h2},
-		{group, h2c}%,
-		%% @todo
-		% {group, http_compress},
-		% {group, https_compress}
+		{group, h2c},
+		{group, http_compress},
+		{group, https_compress},
+		{group, h2_compress},
+		{group, h2c_compress}
 	].

 common_groups(Tests) ->
@@ -54,31 +55,51 @@ common_groups(Tests) ->
 		{http, [parallel], Tests},
 		{https, [parallel], Tests},
 		{h2, [parallel], Tests},
-		{h2c, [parallel], Tests}%,
-		%% @todo
-		% {http_compress, [parallel], Tests},
-		% {https_compress, [parallel], Tests}
+		{h2c, [parallel], Tests},
+		{http_compress, [parallel], Tests},
+		{https_compress, [parallel], Tests},
+		{h2_compress, [parallel], Tests},
+		{h2c_compress, [parallel], Tests}
 	].

 init_common_groups(Name = http, Config, Mod) ->
-	init_http(Name, #{env => #{dispatch => Mod:init_dispatch(Config)}}, Config);
+	init_http(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)}
+	}, Config);
 init_common_groups(Name = https, Config, Mod) ->
-	init_https(Name, #{env => #{dispatch => Mod:init_dispatch(Config)}}, Config);
+	init_https(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)}
+	}, Config);
 init_common_groups(Name = h2, Config, Mod) ->
-	init_http2(Name, #{env => #{dispatch => Mod:init_dispatch(Config)}}, Config);
+	init_http2(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)}
+	}, Config);
 init_common_groups(Name = h2c, Config, Mod) ->
-	Config1 = init_http(Name, #{env => #{dispatch => Mod:init_dispatch(Config)}}, Config),
+	Config1 = init_http(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)}
+	}, Config),
 	lists:keyreplace(protocol, 1, Config1, {protocol, http2});
 init_common_groups(Name = http_compress, Config, Mod) ->
 	init_http(Name, #{
 		env => #{dispatch => Mod:init_dispatch(Config)},
-		compress => true
+		stream_handlers => [cowboy_compress_h, cowboy_stream_h]
 	}, Config);
 init_common_groups(Name = https_compress, Config, Mod) ->
 	init_https(Name, #{
 		env => #{dispatch => Mod:init_dispatch(Config)},
-		compress => true
-	}, Config).
+		stream_handlers => [cowboy_compress_h, cowboy_stream_h]
+	}, Config);
+init_common_groups(Name = h2_compress, Config, Mod) ->
+	init_http2(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)},
+		stream_handlers => [cowboy_compress_h, cowboy_stream_h]
+	}, Config);
+init_common_groups(Name = h2c_compress, Config, Mod) ->
+	Config1 = init_http(Name, #{
+		env => #{dispatch => Mod:init_dispatch(Config)},
+		stream_handlers => [cowboy_compress_h, cowboy_stream_h]
+	}, Config),
+	lists:keyreplace(protocol, 1, Config1, {protocol, http2}).

 %% Support functions for testing using Gun.

test/handlers/compress_h.erl (new file, 37 lines)
%% This module sends a response body of varying sizes to test
%% the cowboy_compress_h stream handler.

-module(compress_h).

-export([init/2]).

init(Req0, State=reply) ->
	Req = case cowboy_req:binding(what, Req0) of
		<<"small">> ->
			cowboy_req:reply(200, #{}, lists:duplicate(100, $a), Req0);
		<<"large">> ->
			cowboy_req:reply(200, #{}, lists:duplicate(100000, $a), Req0);
		<<"content-encoding">> ->
			cowboy_req:reply(200, #{<<"content-encoding">> => <<"compress">>},
				lists:duplicate(100000, $a), Req0);
		<<"sendfile">> ->
			AppFile = code:where_is_file("cowboy.app"),
			Size = filelib:file_size(AppFile),
			cowboy_req:reply(200, #{}, {sendfile, 0, Size, AppFile}, Req0)
	end,
	{ok, Req, State};
init(Req0, State=stream_reply) ->
	Req = case cowboy_req:binding(what, Req0) of
		<<"large">> ->
			stream_reply(#{}, Req0);
		<<"content-encoding">> ->
			stream_reply(#{<<"content-encoding">> => <<"compress">>}, Req0)
	end,
	{ok, Req, State}.

stream_reply(Headers, Req0) ->
	Data = lists:duplicate(10000, $a),
	Req = cowboy_req:stream_reply(200, Headers, Req0),
	_ = [cowboy_req:stream_body(Data, nofin, Req) || _ <- lists:seq(1,9)],
	cowboy_req:stream_body(Data, fin, Req),
	Req.
@@ -79,5 +79,5 @@ read_body(Req0, Acc) ->

 value_to_iodata(V) when is_integer(V) -> integer_to_binary(V);
 value_to_iodata(V) when is_atom(V) -> atom_to_binary(V, latin1);
-value_to_iodata(V) when is_list(V); is_tuple(V); is_map(V) -> io_lib:format("~p", [V]);
+value_to_iodata(V) when is_list(V); is_tuple(V); is_map(V) -> io_lib:format("~999999p", [V]);
 value_to_iodata(V) -> V.
@@ -28,7 +28,7 @@ do(<<"set_resp_header">>, Req0, Opts) ->
 do(<<"set_resp_headers">>, Req0, Opts) ->
 	Req = cowboy_req:set_resp_headers(#{
 		<<"content-type">> => <<"text/plain">>,
-		<<"content-encoding">> => <<"gzip">>
+		<<"content-encoding">> => <<"compress">>
 	}, Req0),
 	{ok, cowboy_req:reply(200, #{}, "OK", Req), Opts};
 do(<<"resp_header_defined">>, Req0, Opts) ->
@@ -44,7 +44,7 @@ do(<<"resp_headers">>, Req0, Opts) ->
 	Req1 = cowboy_req:set_resp_header(<<"server">>, <<"nginx">>, Req0),
 	Req = cowboy_req:set_resp_headers(#{
 		<<"content-type">> => <<"text/plain">>,
-		<<"content-encoding">> => <<"gzip">>
+		<<"content-encoding">> => <<"compress">>
 	}, Req1),
 	Headers = cowboy_req:resp_headers(Req),
 	true = maps:is_key(<<"server">>, Headers),
@@ -47,41 +47,45 @@ init_dispatch(_) ->
 long_polling(Config) ->
 	doc("Simple long-polling."),
 	ConnPid = gun_open(Config),
-	Ref = gun:get(ConnPid, "/long_polling"),
+	Ref = gun:get(ConnPid, "/long_polling", [{<<"accept-encoding">>, <<"gzip">>}]),
 	{response, fin, 102, _} = gun:await(ConnPid, Ref),
 	ok.

 long_polling_body(Config) ->
 	doc("Long-polling with a body that falls within the configurable limits."),
 	ConnPid = gun_open(Config),
-	Ref = gun:post(ConnPid, "/long_polling", [], << 0:5000/unit:8 >>),
+	Ref = gun:post(ConnPid, "/long_polling", [{<<"accept-encoding">>, <<"gzip">>}],
+		<< 0:5000/unit:8 >>),
 	{response, fin, 102, _} = gun:await(ConnPid, Ref),
 	ok.

 long_polling_body_too_large(Config) ->
 	doc("Long-polling with a body that exceeds the configurable limits."),
 	ConnPid = gun_open(Config),
-	Ref = gun:post(ConnPid, "/long_polling", [], << 0:100000/unit:8 >>),
+	Ref = gun:post(ConnPid, "/long_polling", [{<<"accept-encoding">>, <<"gzip">>}],
+		<< 0:100000/unit:8 >>),
 	{response, fin, 500, _} = gun:await(ConnPid, Ref),
 	ok.

 long_polling_pipeline(Config) ->
 	doc("Pipeline of long-polling calls."),
 	ConnPid = gun_open(Config),
-	Refs = [gun:get(ConnPid, "/long_polling") || _ <- lists:seq(1, 2)],
+	Refs = [gun:get(ConnPid, "/long_polling", [{<<"accept-encoding">>, <<"gzip">>}])
+		|| _ <- lists:seq(1, 2)],
 	_ = [{response, fin, 102, _} = gun:await(ConnPid, Ref) || Ref <- Refs],
 	ok.

 loop_body(Config) ->
 	doc("Check that a loop handler can read the request body in info/3."),
 	ConnPid = gun_open(Config),
-	Ref = gun:post(ConnPid, "/loop_body", [], << 0:100000/unit:8 >>),
+	Ref = gun:post(ConnPid, "/loop_body", [{<<"accept-encoding">>, <<"gzip">>}],
+		<< 0:100000/unit:8 >>),
 	{response, fin, 200, _} = gun:await(ConnPid, Ref),
 	ok.

 loop_timeout(Config) ->
 	doc("Ensure that the loop handler timeout results in a 204 response."),
 	ConnPid = gun_open(Config),
-	Ref = gun:get(ConnPid, "/loop_timeout"),
+	Ref = gun:get(ConnPid, "/loop_timeout", [{<<"accept-encoding">>, <<"gzip">>}]),
 	{response, fin, 204, _} = gun:await(ConnPid, Ref),
 	ok.
@@ -25,14 +25,7 @@ all() ->
 	cowboy_test:common_all().

 groups() ->
-	AllTests = ct_helper:all(?MODULE),
-	[
-		{http, [parallel], AllTests},
-		{https, [parallel], AllTests},
-		{h2, [parallel], AllTests},
-		{h2c, [parallel], AllTests}
-		%% @todo With compression enabled.
-	].
+	cowboy_test:common_groups(ct_helper:all(?MODULE)).

 init_per_suite(Config) ->
 	ct_helper:create_static_dir(config(priv_dir, Config) ++ "/static"),
@@ -74,30 +67,31 @@ do_body(Method, Path, Config) ->
 do_body(Method, Path, Headers, Config) ->
 	do_body(Method, Path, Headers, <<>>, Config).

-do_body(Method, Path, Headers, Body, Config) ->
+do_body(Method, Path, Headers0, Body, Config) ->
 	ConnPid = gun_open(Config),
+	Headers = [{<<"accept-encoding">>, <<"gzip">>}|Headers0],
 	Ref = case Body of
 		<<>> -> gun:request(ConnPid, Method, Path, Headers);
 		_ -> gun:request(ConnPid, Method, Path, Headers, Body)
 	end,
-	{response, IsFin, 200, _} = gun:await(ConnPid, Ref),
+	{response, IsFin, 200, RespHeaders} = gun:await(ConnPid, Ref),
 	{ok, RespBody} = case IsFin of
 		nofin -> gun:await_body(ConnPid, Ref);
 		fin -> {ok, <<>>}
 	end,
 	gun:close(ConnPid),
-	RespBody.
+	do_decode(RespHeaders, RespBody).

 do_get(Path, Config) ->
 	ConnPid = gun_open(Config),
-	Ref = gun:get(ConnPid, Path, []),
-	{response, IsFin, Status, Headers} = gun:await(ConnPid, Ref),
+	Ref = gun:get(ConnPid, Path, [{<<"accept-encoding">>, <<"gzip">>}]),
+	{response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref),
 	{ok, RespBody} = case IsFin of
 		nofin -> gun:await_body(ConnPid, Ref);
 		fin -> {ok, <<>>}
 	end,
 	gun:close(ConnPid),
-	{Status, Headers, RespBody}.
+	{Status, RespHeaders, do_decode(RespHeaders, RespBody)}.

 do_get_body(Path, Config) ->
 	do_get_body(Path, [], Config).
@@ -105,6 +99,12 @@ do_get_body(Path, Config) ->
 do_get_body(Path, Headers, Config) ->
 	do_body("GET", Path, Headers, Config).

+do_decode(Headers, Body) ->
+	case lists:keyfind(<<"content-encoding">>, 1, Headers) of
+		{_, <<"gzip">>} -> zlib:gunzip(Body);
+		_ -> Body
+	end.
+
 %% Tests: Request.

 binding(Config) ->
@@ -129,7 +129,8 @@ header(Config) ->

 headers(Config) ->
 	doc("Request headers."),
-	<< "#{<<\"header\">> => <<\"value\">>", _/bits >>
+	%% We always send accept-encoding with this test suite's requests.
+	<<"#{<<\"accept-encoding\">> => <<\"gzip\">>,<<\"header\">> => <<\"value\">>", _/bits>>
 		= do_get_body("/headers", [{<<"header">>, "value"}], Config),
 	ok.

@@ -39,8 +39,11 @@ groups() ->
 		{http, [parallel], GroupTests},
 		{https, [parallel], GroupTests},
 		{h2, [parallel], GroupTests},
-		{h2c, [parallel], GroupTests}
-		%% @todo With compression enabled.
+		{h2c, [parallel], GroupTests},
+		{http_compress, [parallel], GroupTests},
+		{https_compress, [parallel], GroupTests},
+		{h2_compress, [parallel], GroupTests},
+		{h2c_compress, [parallel], GroupTests}
 	].

 init_per_suite(Config) ->
@@ -171,7 +174,7 @@ do_get(Path, Config) ->

 do_get(Path, ReqHeaders, Config) ->
 	ConnPid = gun_open(Config),
-	Ref = gun:get(ConnPid, Path, ReqHeaders),
+	Ref = gun:get(ConnPid, Path, [{<<"accept-encoding">>, <<"gzip">>}|ReqHeaders]),
 	{response, IsFin, Status, RespHeaders} = gun:await(ConnPid, Ref),
 	{ok, Body} = case IsFin of
 		nofin -> gun:await_body(ConnPid, Ref);