author    Kim Alvefur <zash@zash.se>    2023-08-23 12:18:34 +0200
committer Kim Alvefur <zash@zash.se>    2023-08-23 12:18:34 +0200
commit    674b91b82b0ea6e3462aa5b26a424925a08fade8 (patch)
tree      33c79654e6a8e577c3484f16fbde3f57fa30ec45
parent    e8128c1d608cb18b1d1e913395fb9b897dd4525e (diff)
net.http.parser: Reject overlarge header section earlier
Without this check, an oversized header section would only be rejected later, once it grew past the overall buffer size limit.
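
In short, request-head parsing now fails fast: if the end-of-headers marker ("\r\n\r\n") has not appeared within the first head-size-limit bytes and more than that many bytes are already buffered, the parser reports "header-too-large" immediately. Below is a minimal, self-contained sketch of that guard in plain Lua; it is illustrative only, since the real code (in the diff that follows) operates on Prosody's buffer object rather than a plain string.

	local function check_head(buffered, headlimit, error_cb)
		-- look for the blank line that ends the header section, but only
		-- within the first `headlimit` bytes
		local index = buffered:sub(1, headlimit):find("\r\n\r\n", nil, true);
		if not index then
			if #buffered > headlimit then
				-- more data buffered than the head is allowed to be: fail now
				return error_cb("header-too-large");
			end
			return; -- not enough data yet, wait for more
		end
		return index; -- header section complete, continue parsing
	end

	-- ~2 KiB of header data against a 1 KiB head limit triggers the error:
	check_head(("X-Pad: y\r\n"):rep(205), 1024, print) --> header-too-large
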
-rw-r--r--  net/http/parser.lua            |  8 +++++++-
-rw-r--r--  spec/net_http_parser_spec.lua  | 27 +++++++++++++++++++++++++++
2 files changed, 34 insertions(+), 1 deletion(-)
diff --git a/net/http/parser.lua b/net/http/parser.lua
index a6624662..12d40883 100644
--- a/net/http/parser.lua
+++ b/net/http/parser.lua
@@ -59,7 +59,13 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
while buffer:length() > 0 do
if state == nil then -- read request
local index = buffer:sub(1, headlimit):find("\r\n\r\n", nil, true);
- if not index then return; end -- not enough data
+ if not index then
+ if buffer:length() > headlimit then
+ return error_cb("header-too-large");
+ end
+ -- not enough data
+ return;
+ end
-- FIXME was reason_phrase meant to be passed on somewhere?
local method, path, httpversion, status_code, reason_phrase; -- luacheck: ignore reason_phrase
local first_line;
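
The `headlimit` compared against above comes from the options callback passed to `httpstream.new`; as the new spec below shows, callers can supply a `head_size_limit` alongside the existing `body_size_limit` and `buffer_size_limit`. A hedged usage sketch mirroring that test (callback bodies and limit values here are illustrative, not Prosody's defaults):

	local http_parser = require "net.http.parser";

	local parser = http_parser.new(
		function() print("request parsed") end,       -- success_cb
		function(err) print("parse error:", err) end, -- error_cb
		"server",
		function()
			-- limits consulted by the parser; head_size_limit feeds the new check
			return { head_size_limit = 1024; body_size_limit = 1024; buffer_size_limit = 2048 };
		end
	);

	parser:feed("GET / HTTP/1.1\r\n" .. ("X-Filler: aaaaaaaa\r\n"):rep(64));
	-- With ~1.3 KiB of headers buffered and no blank line yet, error_cb now
	-- fires with "header-too-large" instead of waiting for the buffer limit.
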
diff --git a/spec/net_http_parser_spec.lua b/spec/net_http_parser_spec.lua
index 9da8ca73..84a81f05 100644
--- a/spec/net_http_parser_spec.lua
+++ b/spec/net_http_parser_spec.lua
@@ -167,6 +167,33 @@ Transfer-Encoding: chunked
}
);
end);
+
+ it("should reject very large request heads", function()
+ local finished = false;
+ local success_cb = spy.new(function()
+ finished = true;
+ end)
+ local error_cb = spy.new(function()
+ finished = true;
+ end)
+ local parser = http_parser.new(success_cb, error_cb, "server", function()
+ return { head_size_limit = 1024; body_size_limit = 1024; buffer_size_limit = 2048 };
+ end)
+ parser:feed("GET / HTTP/1.1\r\n");
+ for i = 1, 64 do -- * header line > buffer_size_limit
+ parser:feed(string.format("Header-%04d: Yet-AnotherValue\r\n", i));
+ if finished then
+ -- should hit an error around half-way
+ break
+ end
+ end
+ if not finished then
+ parser:feed("\r\n")
+ end
+ assert.spy(success_cb).was_called(0);
+ assert.spy(error_cb).was_called(1);
+ assert.spy(error_cb).was_called_with("header-too-large");
+ end)
end);
it("should handle large chunked responses", function ()