| author | Kim Alvefur <zash@zash.se> | 2023-12-01 17:11:19 +0100 |
|---|---|---|
| committer | Kim Alvefur <zash@zash.se> | 2023-12-01 17:11:19 +0100 |
| commit | 6187c44ca905824d9a713f36e8e35531a5ea2363 (patch) | |
| tree | 2950bf0186c9626c96923c1de8aa4403a94485f9 /spec | |
| parent | 1f67f1170cee16feeb2f484f47e473de6dce00e0 (diff) | |
| parent | 674b91b82b0ea6e3462aa5b26a424925a08fade8 (diff) | |
| download | prosody-6187c44ca905824d9a713f36e8e35531a5ea2363.tar.gz prosody-6187c44ca905824d9a713f36e8e35531a5ea2363.zip | |
Merge 0.12->trunk
Diffstat (limited to 'spec')
-rw-r--r-- | spec/net_http_parser_spec.lua | 27 |
1 file changed, 27 insertions, 0 deletions
```diff
diff --git a/spec/net_http_parser_spec.lua b/spec/net_http_parser_spec.lua
index 9da8ca73..84a81f05 100644
--- a/spec/net_http_parser_spec.lua
+++ b/spec/net_http_parser_spec.lua
@@ -167,6 +167,33 @@ Transfer-Encoding: chunked
 				}
 			);
 		end);
+
+		it("should reject very large request heads", function()
+			local finished = false;
+			local success_cb = spy.new(function()
+				finished = true;
+			end)
+			local error_cb = spy.new(function()
+				finished = true;
+			end)
+			local parser = http_parser.new(success_cb, error_cb, "server", function()
+				return { head_size_limit = 1024; body_size_limit = 1024; buffer_size_limit = 2048 };
+			end)
+			parser:feed("GET / HTTP/1.1\r\n");
+			for i = 1, 64 do -- * header line > buffer_size_limit
+				parser:feed(string.format("Header-%04d: Yet-AnotherValue\r\n", i));
+				if finished then
+					-- should hit an error around half-way
+					break
+				end
+			end
+			if not finished then
+				parser:feed("\r\n")
+			end
+			assert.spy(success_cb).was_called(0);
+			assert.spy(error_cb).was_called(1);
+			assert.spy(error_cb).was_called_with("header-too-large");
+		end)
 	end);
 
 	it("should handle large chunked responses", function ()
```
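For readers skimming the diff: the new spec exercises the size limits the parser applies to the request head. The parser is built with an options callback returning `head_size_limit`, `body_size_limit` and `buffer_size_limit`; once the accumulated head outgrows the limit, the error callback must fire with `"header-too-large"` and the success callback must never run. The sketch below is not part of the commit; it replays the same pattern outside the test harness, assuming only the API visible in the diff (`http_parser.new`, `parser:feed`) and the `net.http.parser` module name used by the spec file. The header names and limit values here are illustrative.

```lua
-- Minimal sketch of the behaviour the new test checks, assuming Prosody's
-- net.http.parser module is on package.path (values are illustrative).
local http_parser = require "net.http.parser";

local errored = false;

local parser = http_parser.new(
	function() print("request parsed"); end,                     -- success callback: must NOT run
	function(err) errored = true; print("parse error:", err); end, -- expected: "header-too-large"
	"server",
	function()
		-- same options the new test supplies
		return { head_size_limit = 1024; body_size_limit = 1024; buffer_size_limit = 2048 };
	end);

parser:feed("GET / HTTP/1.1\r\n");
for i = 1, 64 do
	if errored then break end -- as in the test, stop once the limit is hit
	-- each header line grows the buffered head; per the test's comment the error
	-- should arrive around half-way, i.e. once the head passes head_size_limit
	parser:feed(string.format("X-Padding-%04d: some-value\r\n", i));
end
if not errored then
	parser:feed("\r\n"); -- terminate the head if the limit was somehow never reached
end
```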