Fix chunked transfer encoding

This commit is contained in:
jaina heartles 2022-11-07 00:25:33 -08:00
parent d2151ae326
commit cbf98c1cf3
2 changed files with 27 additions and 18 deletions

View file

@@ -148,11 +148,10 @@ fn prepareBody(headers: Fields, reader: anytype) !?TransferStream(@TypeOf(reader
},
.chunked => {
if (headers.get("Content-Length") != null) return error.BadRequest;
std.log.debug("Using chunked T-E", .{});
return TransferStream(@TypeOf(reader)){
.underlying = .{
.chunked = ChunkedStream(@TypeOf(reader)){
.underlying = reader,
},
.chunked = try ChunkedStream(@TypeOf(reader)).init(reader),
},
};
},
@@ -163,29 +162,35 @@ fn ChunkedStream(comptime R: type) type {
return struct {
const Self = @This();
remaining: ?usize = null,
remaining: ?usize = 0,
underlying: R,
const Error = R.Error || error{ Unexpected, InvalidChunkHeader, StreamTooLong, EndOfStream };
fn init(reader: R) !Self {
var self: Self = .{ .underlying = reader };
return self;
}
fn read(self: *Self, buf: []u8) !usize {
if (self.remaining) |*remaining| {
var count: usize = 0;
while (count < buf.len) {
const max_read = std.math.min(buf.len, remaining.*);
const amt = try self.underlying.read(buf[count .. count + max_read]);
if (amt != max_read) return error.EndOfStream;
count += amt;
remaining.* -= amt;
if (count == buf.len) return count;
var count: usize = 0;
while (true) {
if (count == buf.len) return count;
if (self.remaining == null) return count;
if (self.remaining.? == 0) self.remaining = try self.readChunkHeader();
const max_read = std.math.min(buf.len, self.remaining.?);
const amt = try self.underlying.read(buf[count .. count + max_read]);
if (amt != max_read) return error.EndOfStream;
count += amt;
self.remaining.? -= amt;
if (self.remaining.? == 0) {
var crlf: [2]u8 = undefined;
_ = try self.underlying.readUntilDelimiter(&crlf, '\n');
self.remaining = try self.readChunkHeader();
}
} else {
return 0;
}
unreachable;
if (count == buf.len) return count;
}
}
fn readChunkHeader(self: *Self) !?usize {
@@ -194,10 +199,13 @@ fn ChunkedStream(comptime R: type) type {
const line = self.underlying.readUntilDelimiter(&buf, '\n') catch |err| {
return if (err == error.StreamTooLong) error.InvalidChunkHeader else err;
};
std.log.debug("{}: {s}", .{ line.len, line });
if (line.len < 2 or line[line.len - 1] != '\r') return error.InvalidChunkHeader;
const size = std.fmt.parseInt(usize, line[0 .. line.len - 1], 16) catch return error.InvalidChunkHeader;
std.log.debug("Got chunk header of size {}: {s}", .{ size, line });
return if (size != 0) size else null;
}
};

View file

@@ -150,6 +150,7 @@ pub fn Context(comptime Route: type) type {
if (Body != void) {
var stream = req.body orelse return error.NoBody;
const body = try stream.reader().readAllAlloc(self.allocator, 1 << 16);
std.log.debug("{s}", .{body});
errdefer self.allocator.free(body);
self.body = try json_utils.parse(Body, body, self.allocator);
self.body_buf = body;