Changeset 12890:05b3b3dc7326: Merge 0.12->trunk

author    Matthew Wild <mwild1@gmail.com>
date      Fri, 17 Feb 2023 17:02:41 +0000
parents   12888:b8504b71735d, 12889:94a99330ce87
children  12891:93ce4244d433
diffstat  2 files changed, 53 insertions(+), 5 deletions(-)

--- a/net/http/parser.lua	Thu Feb 16 20:15:39 2023 +0100
+++ b/net/http/parser.lua	Fri Feb 17 17:02:41 2023 +0000
@@ -153,6 +153,7 @@
 						if not chunk_size then return; end
 						chunk_size = chunk_size and tonumber(chunk_size, 16);
 						if not chunk_size then error = true; return error_cb("invalid-chunk-size"); end
+
 						if chunk_size == 0 and chunk_header:find("\r\n\r\n", chunk_start-2, true) then
 							local body_buffer = packet.body_buffer;
 							if body_buffer then
@@ -168,8 +169,8 @@
 							state, chunked = nil, nil;
 							packet.partial = nil;
 							success_cb(packet);
-						elseif buffer:length() - chunk_start - 2 >= chunk_size then -- we have a chunk
-							buffer:discard(chunk_start - 1); -- TODO verify that it's not off-by-one
+						elseif buffer:length() - chunk_start - 1 >= chunk_size then -- we have a chunk
+							buffer:discard(chunk_start - 1);
 							(packet.body_sink or packet.body_buffer):write(buffer:read(chunk_size));
 							buffer:discard(2); -- CRLF
 						else -- Partial chunk remaining
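
The parser.lua hunk above fixes the off-by-one flagged by the removed TODO: the old boundary check required one byte beyond the chunk data and its trailing CRLF, so a chunk whose terminating CRLF coincided with the end of the received data was never delivered. A minimal sketch of the arithmetic, assuming (as the discard(chunk_start - 1) above suggests) that chunk_start is the 1-based buffer index of the first chunk-data byte:

local function have_full_chunk(buffer_len, chunk_start, chunk_size)
	-- The data plus its trailing CRLF are buffered once
	-- buffer_len - chunk_start + 1 >= chunk_size + 2,
	-- which is the corrected "buffer:length() - chunk_start - 1 >= chunk_size".
	return buffer_len - chunk_start - 1 >= chunk_size;
end

-- A buffer holding exactly one complete chunk, "3\r\n:)\n\r\n" (8 bytes),
-- has its data ":)\n" starting at index 4:
assert(have_full_chunk(8, 4, 3));  -- new check: the chunk is complete
assert(not (8 - 4 - 2 >= 3));      -- old check: stalls waiting for a ninth byte
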
--- a/spec/net_http_parser_spec.lua	Thu Feb 16 20:15:39 2023 +0100
+++ b/spec/net_http_parser_spec.lua	Fri Feb 17 17:02:41 2023 +0000
@@ -8,18 +8,48 @@
 end
 
 local function test_stream(stream, expect)
+	local chunks_processed = 0;
 	local success_cb = spy.new(function (packet)
 		assert.is_table(packet);
 		if packet.body ~= false then
 			assert.is_equal(expect.body, packet.body);
 		end
+		if expect.chunks then
+			if chunks_processed == 0 then
+				assert.is_true(packet.partial);
+				packet.body_sink = {
+					write = function (_, data)
+						chunks_processed = chunks_processed + 1;
+						assert.equal(expect.chunks[chunks_processed], data);
+						return true;
+					end;
+				};
+			end
+		end
 	end);
 
-	local parser = http_parser.new(success_cb, error, stream:sub(1,4) == "HTTP" and "client" or "server")
-	for chunk in stream:gmatch("."..string.rep(".?", parser_input_bytes-1)) do
-		parser:feed(chunk);
+	local function options_cb()
+		return {
+			-- Force streaming API mode
+			body_size_limit = expect.chunks and 0 or nil;
+			buffer_size_limit = 10*1024*2;
+		};
 	end
 
+	local parser = http_parser.new(success_cb, error, (stream[1] or stream):sub(1,4) == "HTTP" and "client" or "server", options_cb)
+	if type(stream) == "string" then
+		for chunk in stream:gmatch("."..string.rep(".?", parser_input_bytes-1)) do
+			parser:feed(chunk);
+		end
+	else
+		for _, chunk in ipairs(stream) do
+			parser:feed(chunk);
+		end
+	end
+
+	if expect.chunks then
+		assert.equal(chunks_processed, #expect.chunks);
+	end
 	assert.spy(success_cb).was_called(expect.count or 1);
 end
 
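
The reworked test_stream harness above accepts either a whole stream string, still fed to the parser in parser_input_bytes slices, or a table of pre-split chunks; when expect.chunks is given it forces the streaming path by returning body_size_limit = 0 from options_cb and attaching a body_sink to the first partial packet. Outside the spec, a consumer of that streaming surface might look roughly like the following sketch; the module name, argument order and callback shapes are taken from this spec, so treat the details as illustrative rather than canonical:

local http_parser = require "net.http.parser";

local function options_cb()
	-- body_size_limit = 0 routes the body through body_sink instead of
	-- accumulating it into packet.body (the spec's "streaming API mode").
	return { body_size_limit = 0, buffer_size_limit = 10*1024*2 };
end

local parser = http_parser.new(function (packet)
	if packet.partial then
		-- First callback: status line and headers parsed, body still streaming.
		packet.body_sink = {
			write = function (_, data)
				io.write(data); -- each decoded chunk arrives here
				return true;
			end;
		};
	end
end, function (err)
	print("parse error:", err);
end, "client", options_cb);

parser:feed("HTTP/1.1 200 OK\r\nTransfer-Encoding: chunked\r\n\r\n");
parser:feed("3\r\n:)\n\r\n0\r\n\r\n"); -- one data chunk, then the terminating zero-size chunk
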
@@ -120,6 +150,23 @@
 				}
 			);
 		end);
+
+		it("should correctly find chunk boundaries", function ()
+			test_stream({
+
+CRLF[[
+HTTP/1.1 200 OK
+Transfer-Encoding: chunked
+
+]].."3\r\n:)\n\r\n"},
+				{
+					count = 1; -- Once (partial)
+					chunks = {
+						":)\n"
+					};
+				}
+			);
+		end);
 	end);
 
 	it("should handle large chunked responses", function ()