author     Daniel Stenberg <daniel@haxx.se>  2018-02-16 09:49:33 +0100
committer  Daniel Stenberg <daniel@haxx.se>  2018-02-16 09:50:03 +0100
commit     e9ec2357f4693da86ac28f510370ecd1ddd221ef (patch)
tree       1c06f96891fb4cdde67ce2f4fbe8f88bf4fcf228
parent     9d479782885c91dcc20b2cd8a25ec55150e9f1bb (diff)
download   curl-bagder/tests-long-headers.tar.gz

fixup http: fix the max header length detection logic (branch bagder/tests-long-headers)
as verified by test 1154
-rw-r--r--  lib/http.c  21
1 file changed, 10 insertions(+), 11 deletions(-)
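
Below is a minimal, self-contained sketch of the bounded-growth append pattern this patch implements: compute the would-be total, reject it before any reallocation if it exceeds the cap, and otherwise grow the buffer geometrically. The names used here (MAX_HEADER, struct hdrbuf, hdr_append) are illustrative placeholders, not curl's internal types or API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX_HEADER (100 * 1024)  /* hypothetical cap, analogous to CURL_MAX_HTTP_HEADER */

struct hdrbuf {
  char *buf;    /* accumulated header bytes */
  size_t len;   /* bytes currently stored */
  size_t size;  /* allocated capacity */
};

/* Append 'length' bytes to the header buffer. Returns 0 on success,
   -1 on rejection or allocation failure. */
int hdr_append(struct hdrbuf *h, const char *data, size_t length)
{
  size_t newsize = h->len + length;

  /* Check the cap before touching the allocation: a malicious server
     must not be able to force unbounded memory growth by streaming a
     never-ending header. */
  if(newsize > MAX_HEADER) {
    fprintf(stderr, "Rejected %zu bytes header (max is %d)!\n",
            newsize, MAX_HEADER);
    return -1;
  }

  if(newsize >= h->size) {
    /* grow to 1.5x the needed size or double the current capacity,
       whichever is larger */
    size_t grow = newsize * 3 / 2;
    size_t doubled = h->size * 2;
    size_t want = grow > doubled ? grow : doubled;
    char *p = realloc(h->buf, want);
    if(!p)
      return -1;
    h->buf = p;
    h->size = want;
  }

  memcpy(h->buf + h->len, data, length);
  h->len = newsize;
  return 0;
}

The key point, mirroring the reordering in the diff below, is that the size cap is checked on every append, before the "buffer too small" branch, so an oversized header is rejected as soon as the running total crosses the limit rather than only when a reallocation happens to be needed.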
diff --git a/lib/http.c b/lib/http.c
index f44b18ae9..c1c7b3908 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -2880,20 +2880,19 @@ static CURLcode header_append(struct Curl_easy *data,
struct SingleRequest *k,
size_t length)
{
- if(k->hbuflen + length >= data->state.headersize) {
+ size_t newsize = k->hbuflen + length;
+ if(newsize > CURL_MAX_HTTP_HEADER) {
+ /* The reason to have a max limit for this is to avoid the risk of a bad
+ server feeding libcurl with a never-ending header that will cause
+ reallocs infinitely */
+ failf(data, "Rejected %zd bytes header (max is %d)!", newsize,
+ CURL_MAX_HTTP_HEADER);
+ return CURLE_OUT_OF_MEMORY;
+ }
+ if(newsize >= data->state.headersize) {
/* We enlarge the header buffer as it is too small */
char *newbuff;
size_t hbufp_index;
- size_t newsize;
-
- if(k->hbuflen + length > CURL_MAX_HTTP_HEADER) {
- /* The reason to have a max limit for this is to avoid the risk of a bad
- server feeding libcurl with a never-ending header that will cause
- reallocs infinitely */
- failf(data, "Avoided giant realloc for header (max is %d)!",
- CURL_MAX_HTTP_HEADER);
- return CURLE_OUT_OF_MEMORY;
- }
newsize = CURLMAX((k->hbuflen + length) * 3 / 2, data->state.headersize*2);
hbufp_index = k->hbufp - data->state.headerbuff;