author | Daniel Stenberg <daniel@haxx.se> | 2017-09-09 23:09:06 +0200 |
---|---|---|
committer | Daniel Stenberg <daniel@haxx.se> | 2017-09-11 09:29:50 +0200 |
commit | 6b84438d9a9220fb75cbaae9d6fe6c3edb6d425e (patch) | |
tree | 109c29611f5bd2dbedab015b45524e8ffe6e1057 /lib/http.c | |
parent | e155f38d1eaa89cc8ce2a6536b74be2954506bb0 (diff) | |
download | curl-6b84438d9a9220fb75cbaae9d6fe6c3edb6d425e.tar.gz | |
code style: use spaces around equals signs
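The rule the commit applies is purely cosmetic: an assignment or comparison operator such as `=`, `==` or `>=` gets one space on each side. A minimal, hypothetical illustration of the old and new styles (not code taken from lib/http.c):

```c
#include <stdio.h>

/* Hypothetical snippet, not from lib/http.c: the same function written in
   the old style (no spaces around '=' / '==') and in the spaced style this
   commit converts the file to. */

static int count_matches_old(const int *v, int len, int wanted)
{
  int count=0;                 /* old style: no spaces around '=' */
  int i;
  for(i=0; i < len; i++)
    if(v[i]==wanted)
      count++;
  return count;
}

static int count_matches_new(const int *v, int len, int wanted)
{
  int count = 0;               /* new style: spaces around '=' */
  int i;
  for(i = 0; i < len; i++)
    if(v[i] == wanted)
      count++;
  return count;
}

int main(void)
{
  int v[4] = {1, 2, 2, 3};
  /* both variants behave identically; only the whitespace differs */
  printf("%d %d\n", count_matches_old(v, 4, 2), count_matches_new(v, 4, 2));
  return 0;
}
```

The whitespace has no effect on the compiled code; the point of the commit is simply a consistent code style across the file.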
Diffstat (limited to 'lib/http.c')
-rw-r--r-- | lib/http.c | 42 |
1 file changed, 21 insertions, 21 deletions
diff --git a/lib/http.c b/lib/http.c
index e7f6872d5..025cb2da1 100644
--- a/lib/http.c
+++ b/lib/http.c
@@ -190,7 +190,7 @@ char *Curl_checkProxyheaders(const struct connectdata *conn,
   for(head = (conn->bits.proxy && data->set.sep_headers) ?
         data->set.proxyheaders : data->set.headers;
-      head; head=head->next) {
+      head; head = head->next) {
     if(strncasecompare(head->data, thisheader, thislen))
       return head->data;
   }
@@ -592,7 +592,7 @@ output_auth_headers(struct connectdata *conn,
 #endif
 #if defined(USE_NTLM) && defined(NTLM_WB_ENABLED)
   if(authstatus->picked == CURLAUTH_NTLM_WB) {
-    auth="NTLM_WB";
+    auth = "NTLM_WB";
     result = Curl_output_ntlm_wb(conn, proxy);
     if(result)
       return result;
@@ -1004,7 +1004,7 @@ static size_t readmoredata(char *buffer,
 
       http->sending++; /* move one step up */
 
-      http->backup.postsize=0;
+      http->backup.postsize = 0;
     }
     else
       http->postsize = 0;
@@ -1321,7 +1321,7 @@ Curl_compareheader(const char *headerline, /* line to check */
   clen = strlen(content); /* length of the word to find */
 
   /* find the content string in the rest of the line */
-  for(;len>=clen;len--, start++) {
+  for(;len >= clen;len--, start++) {
     if(strncasecompare(start, content, clen))
       return TRUE; /* match! */
   }
@@ -1562,7 +1562,7 @@ CURLcode Curl_add_custom_headers(struct connectdata *conn,
   char *ptr;
   struct curl_slist *h[2];
   struct curl_slist *headers;
-  int numlists=1; /* by default */
+  int numlists = 1; /* by default */
   struct Curl_easy *data = conn->data;
   int i;
 
@@ -1594,7 +1594,7 @@ CURLcode Curl_add_custom_headers(struct connectdata *conn,
   }
 
   /* loop through one or two lists */
-  for(i=0; i < numlists; i++) {
+  for(i = 0; i < numlists; i++) {
     headers = h[i];
 
     while(headers) {
@@ -1859,7 +1859,7 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
      here. */
   if(Curl_checkheaders(conn, "User-Agent:")) {
     free(conn->allocptr.uagent);
-    conn->allocptr.uagent=NULL;
+    conn->allocptr.uagent = NULL;
   }
 
   /* setup the authentication headers */
@@ -2184,7 +2184,7 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
       }
 
       if(seekerr != CURL_SEEKFUNC_OK) {
-        curl_off_t passed=0;
+        curl_off_t passed = 0;
 
         if(seekerr != CURL_SEEKFUNC_CANTSEEK) {
           failf(data, "Could not seek stream");
@@ -2255,7 +2255,7 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
     }
     else if(data->state.resume_from) {
       /* This is because "resume" was selected */
-      curl_off_t total_expected_size=
+      curl_off_t total_expected_size =
         data->state.resume_from + data->state.infilesize;
       conn->allocptr.rangeline =
         aprintf("Content-Range: bytes %s%" CURL_FORMAT_CURL_OFF_T
@@ -2365,8 +2365,8 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
 
 #if !defined(CURL_DISABLE_COOKIES)
   if(data->cookies || addcookies) {
-    struct Cookie *co=NULL; /* no cookies from start */
-    int count=0;
+    struct Cookie *co = NULL; /* no cookies from start */
+    int count = 0;
 
     if(data->cookies) {
       Curl_share_lock(data, CURL_LOCK_DATA_COOKIE, CURL_LOCK_ACCESS_SINGLE);
@@ -2379,7 +2379,7 @@ CURLcode Curl_http(struct connectdata *conn, bool *done)
       Curl_share_unlock(data, CURL_LOCK_DATA_COOKIE);
     }
     if(co) {
-      struct Cookie *store=co;
+      struct Cookie *store = co;
       /* now loop through all cookies that matched */
       while(co) {
         if(co->value) {
@@ -2881,14 +2881,14 @@ static CURLcode header_append(struct Curl_easy *data,
       return CURLE_OUT_OF_MEMORY;
     }
 
-    newsize=CURLMAX((k->hbuflen+ length)*3/2, data->state.headersize*2);
+    newsize = CURLMAX((k->hbuflen+ length)*3/2, data->state.headersize*2);
     hbufp_index = k->hbufp - data->state.headerbuff;
     newbuff = realloc(data->state.headerbuff, newsize);
     if(!newbuff) {
       failf(data, "Failed to alloc memory for big header!");
       return CURLE_OUT_OF_MEMORY;
     }
-    data->state.headersize=newsize;
+    data->state.headersize = newsize;
     data->state.headerbuff = newbuff;
     k->hbufp = data->state.headerbuff + hbufp_index;
   }
@@ -3349,7 +3349,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
         separator = ' ';
       }
 
-      if((nc==4) && (' ' == separator)) {
+      if((nc == 4) && (' ' == separator)) {
         conn->httpversion += 10 * httpversion_major;
 
         if(k->upgr101 == UPGR101_RECEIVED) {
@@ -3362,7 +3362,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
         /* this is the real world, not a Nirvana
            NCSA 1.5.x returns this crap when asked for HTTP/1.1
         */
-        nc=sscanf(HEADER1, " HTTP %3d", &k->httpcode);
+        nc = sscanf(HEADER1, " HTTP %3d", &k->httpcode);
         conn->httpversion = 10;
 
         /* If user has set option HTTP200ALIASES,
@@ -3387,7 +3387,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
                   &rtspversion_major,
                   &conn->rtspversion,
                   &k->httpcode);
-      if(nc==3) {
+      if(nc == 3) {
         conn->rtspversion += 10 * rtspversion_major;
         conn->httpversion = 11; /* For us, RTSP acts like HTTP 1.1 */
       }
@@ -3419,7 +3419,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
        ((k->httpcode != 407) || !conn->bits.proxy_user_passwd) ) {
       if(data->state.resume_from &&
-         (data->set.httpreq==HTTPREQ_GET) &&
+         (data->set.httpreq == HTTPREQ_GET) &&
          (k->httpcode == 416)) {
         /* "Requested Range Not Satisfiable", just proceed and
            pretend this is no error */
@@ -3475,8 +3475,8 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
          * fields.  */
         if(data->set.timecondition)
           data->info.timecond = TRUE;
-        k->size=0;
-        k->maxdownload=0;
+        k->size = 0;
+        k->maxdownload = 0;
         k->ignorecl = TRUE; /* ignore Content-Length headers */
         break;
       default:
@@ -3724,7 +3724,7 @@ CURLcode Curl_http_readwrite_headers(struct Curl_easy *data,
 #endif
     else if(checkprefix("Last-Modified:", k->p) &&
             (data->set.timecondition || data->set.get_filetime) ) {
-      time_t secs=time(NULL);
+      time_t secs = time(NULL);
       k->timeofdoc = curl_getdate(k->p+strlen("Last-Modified:"),
                                   &secs);
       if(data->set.get_filetime)