-rw-r--r--   net/http/parser.lua              5
-rw-r--r--   spec/net_http_parser_spec.lua   53
2 files changed, 53 insertions, 5 deletions
diff --git a/net/http/parser.lua b/net/http/parser.lua
index b95d9017..a6624662 100644
--- a/net/http/parser.lua
+++ b/net/http/parser.lua
@@ -153,6 +153,7 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
if not chunk_size then return; end
chunk_size = chunk_size and tonumber(chunk_size, 16);
if not chunk_size then error = true; return error_cb("invalid-chunk-size"); end
+
if chunk_size == 0 and chunk_header:find("\r\n\r\n", chunk_start-2, true) then
local body_buffer = packet.body_buffer;
if body_buffer then
@@ -168,8 +169,8 @@ function httpstream.new(success_cb, error_cb, parser_type, options_cb)
state, chunked = nil, nil;
packet.partial = nil;
success_cb(packet);
- elseif buffer:length() - chunk_start - 2 >= chunk_size then -- we have a chunk
- buffer:discard(chunk_start - 1); -- TODO verify that it's not off-by-one
+ elseif buffer:length() - chunk_start - 1 >= chunk_size then -- we have a chunk
+ buffer:discard(chunk_start - 1);
(packet.body_sink or packet.body_buffer):write(buffer:read(chunk_size));
buffer:discard(2); -- CRLF
else -- Partial chunk remaining
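The new comparison resolves the off-by-one flagged by the removed TODO. Assuming chunk_start is the 1-based offset of the first payload byte after the size line's terminating CRLF (which is how the parser's size-line match, outside this hunk, appears to produce it), the buffer holds buffer:length() - chunk_start + 1 bytes of payload plus the trailing CRLF; a chunk is complete once that is at least chunk_size + 2, which rearranges to buffer:length() - chunk_start - 1 >= chunk_size. The old "- 2" form demanded one surplus byte before the chunk would be consumed. An illustrative worked example, not part of the patch, using a plain string in place of the parser's buffer:

local buffer = "3\r\n:)\n\r\n";                      -- 8 bytes: size line, 3 payload bytes, CRLF
local chunk_size, chunk_start = buffer:match("^(%x+)[^\r\n]*\r\n()");  -- "3", 4
chunk_size = tonumber(chunk_size, 16);               -- 3
print(#buffer - chunk_start - 2 >= chunk_size);      -- old test: 8 - 4 - 2 = 2 >= 3 -> false (chunk stalls)
print(#buffer - chunk_start - 1 >= chunk_size);      -- new test: 8 - 4 - 1 = 3 >= 3 -> true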
diff --git a/spec/net_http_parser_spec.lua b/spec/net_http_parser_spec.lua
index ed70c431..9da8ca73 100644
--- a/spec/net_http_parser_spec.lua
+++ b/spec/net_http_parser_spec.lua
@@ -8,18 +8,48 @@ local function CRLF(s)
end
local function test_stream(stream, expect)
+ local chunks_processed = 0;
local success_cb = spy.new(function (packet)
assert.is_table(packet);
if packet.body ~= false then
assert.is_equal(expect.body, packet.body);
end
+ if expect.chunks then
+ if chunks_processed == 0 then
+ assert.is_true(packet.partial);
+ packet.body_sink = {
+ write = function (_, data)
+ chunks_processed = chunks_processed + 1;
+ assert.equal(expect.chunks[chunks_processed], data);
+ return true;
+ end;
+ };
+ end
+ end
end);
- local parser = http_parser.new(success_cb, error, stream:sub(1,4) == "HTTP" and "client" or "server")
- for chunk in stream:gmatch("."..string.rep(".?", parser_input_bytes-1)) do
- parser:feed(chunk);
+ local function options_cb()
+ return {
+ -- Force streaming API mode
+ body_size_limit = expect.chunks and 0 or nil;
+ buffer_size_limit = 10*1024*2;
+ };
+ end
+
+ local parser = http_parser.new(success_cb, error, (stream[1] or stream):sub(1,4) == "HTTP" and "client" or "server", options_cb)
+ if type(stream) == "string" then
+ for chunk in stream:gmatch("."..string.rep(".?", parser_input_bytes-1)) do
+ parser:feed(chunk);
+ end
+ else
+ for _, chunk in ipairs(stream) do
+ parser:feed(chunk);
+ end
end
+ if expect.chunks then
+ assert.equal(chunks_processed, #expect.chunks);
+ end
assert.spy(success_cb).was_called(expect.count or 1);
end
@@ -120,6 +150,23 @@ o
}
);
end);
+
+ it("should correctly find chunk boundaries", function ()
+ test_stream({
+
+CRLF[[
+HTTP/1.1 200 OK
+Transfer-Encoding: chunked
+
+]].."3\r\n:)\n\r\n"},
+ {
+ count = 1; -- Once (partial)
+ chunks = {
+ ":)\n"
+ };
+ }
+ );
+ end);
end);
it("should handle large chunked responses", function ()
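For context, a minimal illustrative sketch (not part of this commit) of driving the streaming path that the new boundary test exercises: an options_cb returning body_size_limit = 0 forces streaming mode, success_cb is first invoked with a partial packet, and attaching a body_sink with a write method routes each decoded chunk to the sink instead of the body buffer; when the terminating zero-size chunk arrives, success_cb fires again with the completed packet.

local http_parser = require "net.http.parser";

local function options_cb()
	return { body_size_limit = 0; buffer_size_limit = 10*1024*1024 };
end

local received = {};
local parser = http_parser.new(function (packet)
	if packet.partial then
		-- Headers parsed; stream the body by supplying a sink.
		packet.body_sink = {
			write = function (_, data)
				table.insert(received, data);
				return true;
			end;
		};
	end
end, function (err) error(err); end, "client", options_cb);

parser:feed("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n");
parser:feed("3\r\n:)\n\r\n");  -- delivered to body_sink:write() as ":)\n"
parser:feed("0\r\n\r\n");      -- terminating chunk; success_cb runs again with the completed packet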