mirror of https://gitlab.isc.org/isc-projects/bind9
synced 2025-09-01 15:05:23 +00:00

Merge branch '3463-httpd.c-non-empty-post-requests-bugfix' into 'main'

Fix statistics channel multiple request processing with non-empty HTTP bodies

Closes #3463

See merge request isc-projects/bind9!6597
--- a/CHANGES
+++ b/CHANGES
@@ -1,3 +1,7 @@
+5946.	[bug]		Fix statistics channel's handling of multiple HTTP
+			requests in a single connection which have non-empty
+			request bodies. [GL #3463]
+
 5945.	[bug]		If parsing /etc/bind.key failed, delv could assert
 			when trying to parse the built in trust anchors as
 			the parser hadn't been reset. [GL !6468]
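
What the new CHANGES entry describes: when a client pipelines several HTTP requests on one statistics-channel connection and earlier requests carry bodies (as POSTs do), the parser has to skip Content-Length bytes of body before looking for the next request in the receive buffer; before this fix the body bytes were never consumed, so the next parse did not begin at the next request line. Below is a minimal standalone sketch of that offset accounting. It is not the BIND code; next_request() and the sample requests are purely illustrative.

/*
 * Sketch: two POST requests share one buffer; the second one starts
 * only after the first body (Content-Length bytes) has been skipped.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Return the offset of the next pipelined request, or 0 if none. */
static size_t
next_request(const char *buf, size_t len) {
	const char *end = strstr(buf, "\r\n\r\n"); /* end of headers */
	const char *cl = strstr(buf, "Content-Length:");
	size_t body = 0, next;

	if (end == NULL) {
		return (0);
	}
	if (cl != NULL && cl < end) {
		body = strtoul(cl + strlen("Content-Length:"), NULL, 10);
	}
	next = (size_t)(end - buf) + 4 + body;
	return (next < len ? next : 0);
}

int
main(void) {
	const char reqs[] =
		"POST /xml/v3/status HTTP/1.1\r\nContent-Length: 2\r\n\r\n{}"
		"POST /xml/v3/status HTTP/1.1\r\nContent-Length: 2\r\n\r\n{}";
	size_t off = next_request(reqs, sizeof(reqs) - 1);

	/*
	 * Without adding the 2-byte body to the offset, parsing would
	 * resume inside "{}" and the second request would be mishandled.
	 */
	printf("second request starts at offset %zu: %.4s\n", off, reqs + off);
	return (0);
}
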
Statistics channel system test script:

@@ -110,8 +110,8 @@ if [ $PERL_JSON ]; then
   [ "$noerror_count" -eq "$json_noerror_count" ] || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 ret=0
 echo_i "checking malloced memory statistics xml/json ($n)"
@@ -131,8 +131,8 @@ if [ $PERL_JSON ]; then
   grep '"Malloced":[0-9][0-9]*,' json.mem > /dev/null || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 echo_i "checking consistency between regular and compressed output ($n)"
 for i in 1 2 3 4 5; do
@@ -158,8 +158,8 @@ for i in 1 2 3 4 5; do
   fi
 done

-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 ret=0
 echo_i "checking if compressed output is really compressed ($n)"
@@ -169,15 +169,15 @@ then
     grep -i Content-Length | sed -e "s/.*: \([0-9]*\).*/\1/"`
   COMPSIZE=`cat compressed.headers | \
     grep -i Content-Length | sed -e "s/.*: \([0-9]*\).*/\1/"`
-  if [ ! `expr $REGSIZE / $COMPSIZE` -gt 2 ]; then
+  if [ ! $((REGSIZE / COMPSIZE)) -gt 2 ]; then
     ret=1
   fi
 else
   echo_i "skipped"
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test dnssec sign statistics.
 zone="dnssec"
@@ -208,8 +208,8 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test sign operations after dynamic update.
 ret=0
@@ -238,8 +238,8 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test sign operations of KSK.
 ret=0
@@ -265,8 +265,8 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test sign operations for scheduled resigning (many keys).
 ret=0
@@ -306,8 +306,8 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test sign operations after dynamic update (many keys).
 ret=0
@@ -344,8 +344,8 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 # Test sign operations after dnssec-policy change (removing keys).
 ret=0
@@ -373,11 +373,11 @@ if [ $PERL_JSON ]; then
   cmp zones.out.j$n zones.expect.$n || ret=1
 fi
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 if [ -x "${NC}" ] ; then
-  echo_i "Check HTTP/1.1 pipelined requests are handled ($n)"
+  echo_i "Check HTTP/1.1 pipelined requests are handled (GET) ($n)"
   ret=0
   ${NC} 10.53.0.3 ${EXTRAPORT1} << EOF > nc.out$n || ret=1
 GET /xml/v3/status HTTP/1.1
@@ -391,8 +391,35 @@ EOF
   lines=$(grep "^HTTP/1.1" nc.out$n | wc -l)
   test $lines = 2 || ret=1
   if [ $ret != 0 ]; then echo_i "failed"; fi
-  status=`expr $status + $ret`
-  n=`expr $n + 1`
+  status=$((status + ret))
+  n=$((n + 1))
+else
+  echo_i "skipping test as nc not found"
+fi
+
+if [ -x "${NC}" ] ; then
+  echo_i "Check HTTP/1.1 pipelined requests are handled (POST) ($n)"
+  ret=0
+  ${NC} 10.53.0.3 ${EXTRAPORT1} << EOF > nc.out$n || ret=1
+POST /xml/v3/status HTTP/1.1
+Host: 10.53.0.3:${EXTRAPORT1}
+Content-Type: application/json
+Content-Length: 3
+
+{}
+POST /xml/v3/status HTTP/1.1
+Host: 10.53.0.3:${EXTRAPORT1}
+Content-Type: application/json
+Content-Length: 3
+Connection: close
+
+{}
+EOF
+  lines=$(grep "^HTTP/1.1" nc.out$n | wc -l)
+  test $lines = 2 || ret=1
+  if [ $ret != 0 ]; then echo_i "failed"; fi
+  status=$((status + ret))
+  n=$((n + 1))
 else
   echo_i "skipping test as nc not found"
 fi
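
One detail of the new POST test worth spelling out: each {} body in the heredoc is followed by a newline, so nc actually sends the three bytes '{', '}' and a line feed, which is why the requests advertise Content-Length: 3 rather than 2. The snippet below is only an illustration of that byte accounting, not something the test runs.

#include <assert.h>
#include <stdio.h>
#include <string.h>

int
main(void) {
	const char body[] = "{}\n";	/* what the heredoc line sends */
	char req[256];
	int n = snprintf(req, sizeof(req),
			 "POST /xml/v3/status HTTP/1.1\r\n"
			 "Host: 10.53.0.3\r\n"
			 "Content-Type: application/json\r\n"
			 "Content-Length: %zu\r\n"
			 "\r\n"
			 "%s",
			 strlen(body), body);

	assert(n > 0 && (size_t)n < sizeof(req));
	assert(strlen(body) == 3);	/* '{', '}' and the newline */
	printf("%s", req);
	return (0);
}
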
@@ -421,8 +448,8 @@ test $((time2 - time1)) -lt 5 || ret=1
 lines=$(grep "^HTTP/1.1" send.out$n | wc -l)
 test $lines = 91 || ret=1
 if [ $ret != 0 ]; then echo_i "failed"; fi
-status=`expr $status + $ret`
-n=`expr $n + 1`
+status=$((status + ret))
+n=$((n + 1))

 echo_i "exit status: $status"
 [ $status -eq 0 ] || exit 1
--- a/lib/isc/httpd.c
+++ b/lib/isc/httpd.c
@@ -319,10 +319,12 @@ destroy_httpdmgr(isc_httpdmgr_t *httpdmgr) {
 /*
  * Look for the given header in headers.
  * If value is specified look for it terminated with a character in eov.
+ * If fvalue is specified and the header was found, then *fvalue will point to
+ * the found header's value.
  */
 static bool
 have_header(isc_httpd_t *httpd, const char *header, const char *value,
-	    const char *eov) {
+	    const char *eov, const char **fvalue) {
 	char *cr, *nl, *h;
 	size_t hlen, vlen = 0;

@@ -356,10 +358,6 @@ have_header(isc_httpd_t *httpd, const char *header, const char *value,
 			continue;
 		}

-	if (value == NULL) {
-		return (true);
-	}
-
 	/*
	 * Skip optional leading white space.
	 */
@@ -367,6 +365,18 @@ have_header(isc_httpd_t *httpd, const char *header, const char *value,
 	while (*h == ' ' || *h == '\t') {
 		h++;
 	}
+
+	/*
+	 * Set the found value.
+	 */
+	if (fvalue != NULL) {
+		*fvalue = h;
+	}
+
+	if (value == NULL) {
+		return (true);
+	}
+
 	/*
	 * Terminate token search on NULL or EOL.
	 */
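
The extra fvalue parameter turns have_header() from a pure predicate into an optional lookup: callers that only need a yes/no answer pass NULL, and callers that need the header's value get a pointer to its first non-blank byte. The following is a much-simplified standalone illustration of that contract; find_header() is a stand-in, not the library routine, and it ignores line boundaries and the eov terminator handling.

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

static bool
find_header(const char *headers, const char *name, const char **fvalue) {
	const char *h = strstr(headers, name);

	if (h == NULL) {
		return (false);
	}
	h += strlen(name);
	while (*h == ' ' || *h == '\t') {	/* skip optional whitespace */
		h++;
	}
	if (fvalue != NULL) {			/* out-parameter is optional */
		*fvalue = h;
	}
	return (true);
}

int
main(void) {
	const char *value = NULL;
	const char *headers = "Host: example\r\nContent-Length: 3\r\n";

	if (find_header(headers, "Content-Length:", &value)) {
		printf("Content-Length value starts with: %.1s\n", value);
	}
	return (0);
}
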
@@ -398,8 +408,10 @@ have_header(isc_httpd_t *httpd, const char *header, const char *value,
 static isc_result_t
 process_request(isc_httpd_t *httpd, isc_region_t *region, size_t *buflen) {
 	char *s = NULL, *p = NULL, *urlend = NULL;
+	const char *content_length = NULL;
 	size_t limit = sizeof(httpd->recvbuf) - httpd->recvlen - 1;
 	size_t len = region->length;
+	size_t clen = 0;
 	int delim;
 	bool truncated = false;

@@ -556,17 +568,40 @@ process_request(isc_httpd_t *httpd, isc_region_t *region, size_t *buflen) {

 	httpd->headers = s;

-	if (have_header(httpd, "Connection:", "close", ", \t\r\n")) {
+	if (!have_header(httpd, "Content-Length:", NULL, NULL, &content_length))
+	{
+		/* Require a Content-Length header for POST requests. */
+		if (httpd->method == METHOD_POST) {
+			return (ISC_R_BADNUMBER);
+		}
+	} else {
+		INSIST(content_length != NULL);
+
+		clen = (size_t)strtoul(content_length, NULL, 10);
+		if (clen == ULONG_MAX) {
+			/* Invalid number in the header value. */
+			return (ISC_R_BADNUMBER);
+		}
+		if (httpd->recvlen < httpd->consume + clen) {
+			/* The request data isn't complete yet. */
+			return (ISC_R_NOTFOUND);
+		}
+
+		/* Consume the request's data, which we do not use. */
+		httpd->consume += clen;
+	}
+
+	if (have_header(httpd, "Connection:", "close", ", \t\r\n", NULL)) {
 		httpd->flags |= HTTPD_CLOSE;
 	}

-	if (have_header(httpd, "Host:", NULL, NULL)) {
+	if (have_header(httpd, "Host:", NULL, NULL, NULL)) {
 		httpd->flags |= HTTPD_FOUNDHOST;
 	}

 	if (strncmp(httpd->protocol, "HTTP/1.0", 8) == 0) {
-		if (have_header(httpd, "Connection:", "Keep-Alive", ", \t\r\n"))
-		{
+		if (have_header(httpd, "Connection:", "Keep-Alive", ", \t\r\n",
+				NULL)) {
 			httpd->flags |= HTTPD_KEEPALIVE;
 		} else {
 			httpd->flags |= HTTPD_CLOSE;
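
Two effects of the block added above: if the advertised body has not fully arrived yet, process_request() returns ISC_R_NOTFOUND so the caller waits for more data, and once the body is complete it is skipped via httpd->consume so the next pipelined request is parsed from the correct offset. The diff validates the header value with a single check against strtoul()'s overflow result; the sketch below illustrates the same parsing idea with slightly stricter errno and end-pointer checks added. parse_content_length() is an invented name, not part of the library.

#include <errno.h>
#include <limits.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

static bool
parse_content_length(const char *value, size_t *lenp) {
	char *end = NULL;
	unsigned long clen;

	errno = 0;
	clen = strtoul(value, &end, 10);
	if (errno == ERANGE || clen == ULONG_MAX) {
		return (false);		/* overflow */
	}
	if (end == value) {
		return (false);		/* no digits at all */
	}
	*lenp = (size_t)clen;
	return (true);
}

int
main(void) {
	size_t clen = 0;

	if (parse_content_length("3", &clen)) {
		printf("body length: %zu\n", clen);
	}
	if (!parse_content_length("99999999999999999999999", &clen)) {
		printf("rejected oversized Content-Length\n");
	}
	return (0);
}
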
@@ -577,7 +612,8 @@ process_request(isc_httpd_t *httpd, isc_region_t *region, size_t *buflen) {
 	 * Check for Accept-Encoding:
 	 */
 #ifdef HAVE_ZLIB
-	if (have_header(httpd, "Accept-Encoding:", "deflate", ";, \t\r\n")) {
+	if (have_header(httpd, "Accept-Encoding:", "deflate", ";, \t\r\n",
+			NULL)) {
 		httpd->flags |= HTTPD_ACCEPT_DEFLATE;
 	}
 #endif /* ifdef HAVE_ZLIB */