aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorMatthew Wild <mwild1@gmail.com>2020-08-24 16:18:13 +0100
committerMatthew Wild <mwild1@gmail.com>2020-08-24 16:18:13 +0100
commit83e593ac2b7f7fa1c23b06b968148a78af8aaed9 (patch)
treee06dc692190209b5efe6242d833742d49260d8bc
parentb0d2bea61ec1f79e193641e229968d9d8fa802d2 (diff)
downloadprosody-83e593ac2b7f7fa1c23b06b968148a78af8aaed9.tar.gz
prosody-83e593ac2b7f7fa1c23b06b968148a78af8aaed9.zip
util.dbuffer: Fix :sub() not working with partially-consumed chunks (thanks Zash for test case)
This also appears to fix some bugs with chunk-encoded streams in net.http.parser.
-rw-r--r--util/dbuffer.lua176
1 file changed, 176 insertions, 0 deletions
diff --git a/util/dbuffer.lua b/util/dbuffer.lua
new file mode 100644
index 00000000..a50f3a64
--- /dev/null
+++ b/util/dbuffer.lua
@@ -0,0 +1,176 @@
local queue = require "util.queue";

-- Method table for dynamic buffer objects; exposed on instances via the
-- shared metatable below (__index lookup).
local dbuffer_methods = {};
local dynamic_buffer_mt = { __index = dbuffer_methods };
+
--- Append a string of bytes to the end of the buffer.
-- Fails (returns nil) if the write would exceed max_size, or if the
-- chunk queue is full even after collapsing pending chunks into one.
-- @tparam string data bytes to append
-- @treturn boolean|nil true on success, nil on failure
function dbuffer_methods:write(data)
	local new_length = self._length + #data;
	if self.max_size and new_length > self.max_size then
		return nil;
	end
	if not self.items:push(data) then
		-- Queue is out of slots: merge all pending chunks into a
		-- single one to free slots, then retry exactly once.
		self:collapse();
		if not self.items:push(data) then
			return nil;
		end
	end
	self._length = new_length;
	return true;
end
+
--- Read up to requested_bytes from the front chunk of the buffer.
-- Consumes what it returns: either pops the front chunk entirely, or
-- advances front_consumed past the returned prefix.
-- @tparam number|nil requested_bytes maximum bytes to read; nil means
--  "the remainder of the front chunk"
-- @treturn string|nil data read (nil/nothing if the buffer is empty)
-- @treturn number number of bytes returned
function dbuffer_methods:read_chunk(requested_bytes)
	local chunk, consumed = self.items:peek(), self.front_consumed;
	if not chunk then return; end
	local chunk_length = #chunk;
	-- Invariant: front_consumed < #front chunk (it is reset to 0 whenever
	-- the front chunk is popped), so remaining_chunk_length >= 1 here.
	local remaining_chunk_length = chunk_length - consumed;
	if not requested_bytes then
		requested_bytes = remaining_chunk_length;
	end
	if remaining_chunk_length <= requested_bytes then
		-- The request covers the rest of this chunk: pop it and return
		-- the unconsumed tail. (No assert needed: sub(consumed+1, -1)
		-- has length chunk_length - consumed by string.sub semantics.)
		self.front_consumed = 0;
		self._length = self._length - remaining_chunk_length;
		self.items:pop();
		return chunk:sub(consumed + 1, -1), remaining_chunk_length;
	end
	-- Partial read: leave the chunk in place and just advance the
	-- consumed offset past the bytes we return.
	local end_pos = consumed + requested_bytes;
	self.front_consumed = end_pos;
	self._length = self._length - requested_bytes;
	return chunk:sub(consumed + 1, end_pos), requested_bytes;
end
+
--- Read and consume bytes from the front of the buffer.
-- With no argument, returns (at most) the front chunk. With a byte
-- count, returns exactly that many bytes, or nil if the buffer holds
-- fewer than requested.
-- @tparam number|nil requested_bytes exact number of bytes wanted
-- @treturn string|nil the data read, or nil on underflow/empty buffer
function dbuffer_methods:read(requested_bytes)
	if not requested_bytes then
		-- No count given: hand back whatever the front chunk holds.
		return (self:read_chunk());
	end
	if requested_bytes > self._length then
		return nil; -- not enough data buffered
	end

	local chunk, read_bytes = self:read_chunk(requested_bytes);
	if not chunk then
		return nil;
	end
	requested_bytes = requested_bytes - read_bytes;
	if requested_bytes == 0 then
		-- Satisfied from a single chunk; skip the concat path.
		return chunk;
	end

	-- Spans multiple chunks: gather pieces and join once at the end.
	local parts = { chunk };
	while requested_bytes > 0 do
		chunk, read_bytes = self:read_chunk(requested_bytes);
		requested_bytes = requested_bytes - read_bytes;
		parts[#parts+1] = chunk;
	end
	return table.concat(parts);
end
+
--- Consume and throw away requested_bytes from the front of the buffer.
-- @tparam number requested_bytes number of bytes to drop
-- @treturn boolean|nil true on success, nil if the buffer holds fewer
--  bytes than requested (or is empty)
function dbuffer_methods:discard(requested_bytes)
	if requested_bytes > self._length then
		return nil; -- not enough data buffered
	end

	local chunk, read_bytes = self:read_chunk(requested_bytes);
	if not chunk then
		return nil;
	end
	requested_bytes = requested_bytes - read_bytes;

	-- Keep consuming chunks until the full count has been dropped.
	while requested_bytes > 0 do
		chunk, read_bytes = self:read_chunk(requested_bytes);
		requested_bytes = requested_bytes - read_bytes;
	end
	return true;
end
+
--- Non-consuming string.sub() over the buffered bytes.
-- Indices follow string.sub semantics: 1-based, negative counts from
-- the end, out-of-range values are clamped.
-- @tparam number i start index
-- @tparam number|nil j end index (default -1, i.e. end of buffer)
-- @treturn string the requested range ("" when the range is empty)
function dbuffer_methods:sub(i, j)
	local buffered = self._length;
	if j == nil then
		j = -1;
	end
	-- Translate negative indices to absolute positions.
	if j < 0 then
		j = buffered + (j+1);
	end
	if i < 0 then
		i = buffered + (i+1);
	end
	-- Clamp to the valid range [1, buffered].
	if i < 1 then
		i = 1;
	end
	if j > buffered then
		j = buffered;
	end
	if i > j then
		return "";
	end

	-- Merge enough chunks that bytes 1..j live in the front chunk,
	-- then slice past the already-consumed prefix before indexing.
	self:collapse(j);

	return self.items:peek():sub(self.front_consumed+1):sub(i, j);
end
+
--- Non-consuming string.byte() over the buffered bytes.
-- @tparam number|nil i start index (default 1)
-- @tparam number|nil j end index (default i)
-- @return the byte values in the range, as multiple results
function dbuffer_methods:byte(i, j)
	local from = i or 1;
	local to = j or from;
	return string.byte(self:sub(from, to), 1, -1);
end
+
--- Number of unread bytes currently held in the buffer.
-- @treturn number
function dbuffer_methods:length()
	return self._length;
end
dynamic_buffer_mt.__len = dbuffer_methods.length; -- support # operator
+
--- Ensure the first `bytes` unread bytes all live in a single chunk at
-- the front of the queue (merging chunks as needed). Does not change
-- the buffer's logical length or contents, only its internal layout.
-- @tparam number|nil bytes how many leading bytes to make contiguous
--  (default: the whole buffer)
function dbuffer_methods:collapse(bytes)
	bytes = bytes or self._length;

	local front_chunk = self.items:peek();

	-- Fast path: empty buffer, or the front chunk already covers the
	-- requested range beyond its consumed prefix.
	if not front_chunk or #front_chunk - self.front_consumed >= bytes then
		return;
	end

	-- Start from the unconsumed tail of the front chunk, so the merged
	-- chunk carries no already-consumed prefix.
	local front_chunks = { front_chunk:sub(self.front_consumed+1) };
	local front_bytes = #front_chunks[1];

	-- NOTE(review): assumes bytes <= self._length; if a caller passed a
	-- larger value, peek() below would return nil and #chunk would error.
	while front_bytes < bytes do
		self.items:pop();
		local chunk = self.items:peek();
		front_bytes = front_bytes + #chunk;
		table.insert(front_chunks, chunk);
	end
	-- Swap the merged string in as the new front chunk; its consumed
	-- prefix was stripped above, so reset the offset.
	self.items:replace(table.concat(front_chunks));
	self.front_consumed = 0;
end
+
--- Create a new dynamic buffer.
-- @tparam number|nil max_size maximum total bytes the buffer may hold
--  (nil for unlimited); values <= 0 are rejected
-- @tparam number|nil max_chunks maximum queued chunks before writes
--  force a collapse (default 32)
-- @treturn table|nil the new buffer object, or nil on invalid max_size
local function new(max_size, max_chunks)
	if max_size and max_size <= 0 then
		return nil;
	end
	local buffer = {
		front_consumed = 0; -- bytes of the front chunk already read
		_length = 0;        -- total unread bytes across all chunks
		max_size = max_size;
		items = queue.new(max_chunks or 32);
	};
	return setmetatable(buffer, dynamic_buffer_mt);
end
+
-- Module exports
return {
	new = new;
};