BUG/MEDIUM: http/compression: Fix how chunked data are copied during the HTTP body parsing
When compression is enabled on HTTP responses, chunked data are copied into a
temporary buffer during HTTP body parsing and then compressed when everything is
forwarded to the client. But the amount of data that can be copied was not
correctly calculated. It worked in most cases, but failed in edge cases where the
channel buffer was almost full and the pending data wrapped around its end.
[wt: bug introduced by b77c5c26 in 1.7-dev, no backport needed]
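Purely as an illustration (not part of the patch), here is a minimal standalone C
sketch of the wrap-aware copy the fix performs. It uses a simplified ring buffer
instead of HAProxy's struct buffer and the bi_*()/b_adv() helpers; the names
ring_t, contig_from() and copy_pending() are hypothetical.

/* Sketch: the contiguous block must be measured *after* advancing past the
 * already-parsed bytes (*nxt in the patch). Measuring it from the buffer's
 * current start can overestimate the first memcpy and read past the wrap point. */
#include <stddef.h>
#include <string.h>
#include <stdio.h>

typedef struct {
	char   *data;   /* start of storage */
	size_t  size;   /* total storage size */
	size_t  head;   /* offset of the first pending byte */
	size_t  len;    /* number of pending bytes */
} ring_t;

/* contiguous bytes readable from absolute offset <ofs> up to the wrap point */
static size_t contig_from(const ring_t *b, size_t ofs, size_t avail)
{
	size_t to_end = b->size - ofs;
	return avail < to_end ? avail : to_end;
}

/* copy <len> pending bytes, skipping the first <skip>, into a flat buffer <dst> */
static void copy_pending(const ring_t *b, size_t skip, size_t len, char *dst)
{
	size_t ofs   = (b->head + skip) % b->size;  /* advance first, like b_adv() */
	size_t block = contig_from(b, ofs, len);    /* then measure the contiguous part */

	memcpy(dst, b->data + ofs, block);
	if (len > block)                            /* wrapped: copy the tail from data[0] */
		memcpy(dst + block, b->data, len - block);
}

int main(void)
{
	/* logical pending data is "ABCDEFGH", stored with head at offset 4 */
	char storage[8] = { 'E', 'F', 'G', 'H', 'A', 'B', 'C', 'D' };
	ring_t b = { storage, sizeof(storage), 4, 8 };
	char out[9] = { 0 };

	copy_pending(&b, 2, 6, out);                /* skip "AB", copy "CDEFGH" */
	printf("%s\n", out);                        /* prints CDEFGH */
	return 0;
}

The key point mirrors the patch below: the contiguous block is computed after
skipping the already-parsed bytes, so the first memcpy never crosses the wrap
point and the remainder is taken from the start of the storage.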
diff --git a/src/flt_http_comp.c b/src/flt_http_comp.c
index 9ddc858..249ccdf 100644
--- a/src/flt_http_comp.c
+++ b/src/flt_http_comp.c
@@ -177,15 +177,17 @@
 	}
 	if (msg->flags & HTTP_MSGF_TE_CHNK) {
-		int block = bi_contig_data(buf);
+		int block;
 		len = MIN(tmpbuf->size - buffer_len(tmpbuf), len);
-		if (len > block) {
-			memcpy(bi_end(tmpbuf), b_ptr(buf, *nxt), block);
-			memcpy(bi_end(tmpbuf)+block, buf->data, len - block);
-		}
-		else
-			memcpy(bi_end(tmpbuf), b_ptr(buf, *nxt), len);
+
+		b_adv(buf, *nxt);
+		block = bi_contig_data(buf);
+		memcpy(bi_end(tmpbuf), bi_ptr(buf), block);
+		if (len > block)
+			memcpy(bi_end(tmpbuf)+block, buf->data, len-block);
+		b_rew(buf, *nxt);
+
 		tmpbuf->i += len;
 		ret = len;
 	}