author: Kim Alvefur <zash@zash.se>  2023-08-23 12:18:34 +0200
committer: Kim Alvefur <zash@zash.se>  2023-08-23 12:18:34 +0200
commit: 674b91b82b0ea6e3462aa5b26a424925a08fade8
tree: 33c79654e6a8e577c3484f16fbde3f57fa30ec45 /spec
parent: e8128c1d608cb18b1d1e913395fb9b897dd4525e
net.http.parser: Reject overlarge header section earlier
This case would eventually be rejected by the buffer size limit.
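As a rough illustration of the behaviour the commit message describes, and not the actual net.http.parser code: the option name head_size_limit and the "header-too-large" error value both appear in the test below, while the function name, the buffer argument, and the surrounding control flow here are assumptions.

-- Illustrative sketch only (check_head_size and buffer are assumed names);
-- the real parser tracks its buffered input differently.
local function check_head_size(buffer, head_size_limit, error_cb)
	-- While header lines are still being read (no blank line seen yet),
	-- reject as soon as the buffered request head exceeds head_size_limit,
	-- instead of waiting for the larger buffer_size_limit to be hit.
	if #buffer > head_size_limit then
		error_cb("header-too-large");
		return false;
	end
	return true;
end

With the limits used in the test (head_size_limit = 1024, buffer_size_limit = 2048), such a check fires roughly half-way through the 64 generated header lines, which is what the test's loop relies on.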
Diffstat (limited to 'spec')
-rw-r--r--  spec/net_http_parser_spec.lua  27
1 file changed, 27 insertions, 0 deletions
diff --git a/spec/net_http_parser_spec.lua b/spec/net_http_parser_spec.lua
index 9da8ca73..84a81f05 100644
--- a/spec/net_http_parser_spec.lua
+++ b/spec/net_http_parser_spec.lua
@@ -167,6 +167,33 @@ Transfer-Encoding: chunked
}
);
end);
+
+ it("should reject very large request heads", function()
+ local finished = false;
+ local success_cb = spy.new(function()
+ finished = true;
+ end)
+ local error_cb = spy.new(function()
+ finished = true;
+ end)
+ local parser = http_parser.new(success_cb, error_cb, "server", function()
+ return { head_size_limit = 1024; body_size_limit = 1024; buffer_size_limit = 2048 };
+ end)
+ parser:feed("GET / HTTP/1.1\r\n");
+ for i = 1, 64 do -- * header line > buffer_size_limit
+ parser:feed(string.format("Header-%04d: Yet-AnotherValue\r\n", i));
+ if finished then
+ -- should hit an error around half-way
+ break
+ end
+ end
+ if not finished then
+ parser:feed("\r\n")
+ end
+ assert.spy(success_cb).was_called(0);
+ assert.spy(error_cb).was_called(1);
+ assert.spy(error_cb).was_called_with("header-too-large");
+ end)
end);
it("should handle large chunked responses", function ()