| author | Kim Alvefur <zash@zash.se> | 2020-08-01 18:41:23 +0200 |
| --- | --- | --- |
| committer | Kim Alvefur <zash@zash.se> | 2020-08-01 18:41:23 +0200 |
| commit | 91d2ab91086d2aebcbc4d47a5bce05c6cd3abdcb (patch) | |
| tree | 792a543eb0ec61821a6c9d439fca75dfed685c4f /net | |
| parent | 64aa6a2a0e704221821cb53ccd90e19fabfa475e (diff) | |
net.http.parser: Allow specifying sink for large request bodies
This enables use cases such as saving uploaded files directly to disk or
parsing payloads in a streaming fashion.
See #726
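A rough sketch of the caller side, assuming a `success_cb` registered with `httpstream.new`: when the parser makes the new early call for an oversized body, the callback can attach a `packet.body_sink` so the body is streamed rather than buffered or rejected. The file path, the `body == false` check, and the file-backed sink shape are illustrative, not part of this patch; the parser only requires an object with a `:write(data)` method.

```lua
-- Sketch only (not part of this patch): a success_cb that streams oversized
-- request bodies to disk instead of buffering them in memory.
local function success_cb(packet)
	if packet.body == false and not packet.body_sink then
		-- Early notification: Content-Length exceeds the configured body limit.
		-- Attaching a sink makes the parser stream the body instead of failing
		-- with "content-length-limit-exceeded".
		local fh = assert(io.open("/tmp/upload.bin", "wb")); -- hypothetical destination
		packet.body_sink = {
			write = function(_, data) return fh:write(data); end;
			close = function() return fh:close(); end;
		};
	elseif packet.body_sink then
		-- Final delivery: the whole body has been written to the sink.
		packet.body_sink:close();
	end
end
```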
Diffstat (limited to 'net')
-rw-r--r-- | net/http/parser.lua | 26

1 file changed, 22 insertions(+), 4 deletions(-)
```diff
diff --git a/net/http/parser.lua b/net/http/parser.lua
index 84b1e005..b8396518 100644
--- a/net/http/parser.lua
+++ b/net/http/parser.lua
@@ -88,8 +88,6 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
 				if not first_line then error = true; return error_cb("invalid-status-line"); end
 				chunked = have_body and headers["transfer-encoding"] == "chunked";
 				len = tonumber(headers["content-length"]); -- TODO check for invalid len
-				if len and len > bodylimit then error = true; return error_cb("content-length-limit-exceeded"); end
-				-- TODO ask a callback whether to proceed in case of large requests or Expect: 100-continue
 				if client then
 					-- FIXME handle '100 Continue' response (by skipping it)
 					if not have_body then len = 0; end
@@ -126,9 +124,17 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
 						body_sink = nil;
 					};
 				end
-				if chunked then
+				if len and len > bodylimit then
+					-- Early notification, for redirection
+					success_cb(packet);
+					if not packet.body_sink then error = true; return error_cb("content-length-limit-exceeded"); end
+				end
+				if chunked and not packet.body_sink then
+					success_cb(packet);
+					if not packet.body_sink then
 					packet.body_buffer = dbuffer.new(buflimit);
 				end
+				end
 				state = true;
 			end
 			if state then -- read body
@@ -154,11 +160,23 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
 						success_cb(packet);
 					elseif buffer:length() - chunk_start - 2 >= chunk_size then -- we have a chunk
 						buffer:discard(chunk_start - 1); -- TODO verify that it's not off-by-one
-						packet.body_buffer:write(buffer:read(chunk_size));
+						(packet.body_sink or packet.body_buffer):write(buffer:read(chunk_size));
 						buffer:discard(2); -- CRLF
 					else -- Partial chunk remaining
 						break;
 					end
+				elseif packet.body_sink then
+					local chunk = buffer:read_chunk(len);
+					while chunk and len > 0 do
+						if packet.body_sink:write(chunk) then
+							len = len - #chunk;
+							chunk = buffer:read_chunk(len);
+						else
+							error = true;
+							return error_cb("body-sink-write-failure");
+						end
+					end
+					if len == 0 then state = nil; success_cb(packet); end
 				elseif buffer:length() >= len then
 					assert(not chunked)
 					packet.body = buffer:read(len) or "";
```
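As the streaming branch above shows, the parser treats the sink as opaque: it calls `body_sink:write(chunk)` repeatedly, and the non-chunked loop aborts with `"body-sink-write-failure"` if a write returns a falsy value (the chunked path ignores the return value). A minimal in-memory sink satisfying that contract might look like the sketch below; the `new_table_sink` name and the `collect` helper are made up for illustration.

```lua
-- Minimal sketch of the sink contract inferred from the patch: an object whose
-- :write(data) returns a truthy value on success. A falsy return from the
-- non-chunked streaming loop triggers error_cb("body-sink-write-failure").
local function new_table_sink()
	local parts = {};
	return {
		write = function(_, data)
			parts[#parts + 1] = data;
			return true; -- signal success so the parser keeps streaming
		end;
		-- Helper for the consumer; the parser itself never calls this.
		collect = function() return table.concat(parts); end;
	};
end
```

Such a sink would be attached from `success_cb` in the same way as the file-backed one sketched earlier.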