Author: markt
Date: Thu Oct 20 11:06:41 2016
New Revision: 1765801
URL: http://svn.apache.org/viewvc?rev=1765801&view=rev
Log:
Implement limits for HTTP/2 header size and count
Added:
tomcat/trunk/test/org/apache/coyote/http2/TestHttp2Limits.java
Modified:
tomcat/trunk/java/org/apache/coyote/http2/Constants.java
tomcat/trunk/java/org/apache/coyote/http2/HpackDecoder.java
tomcat/trunk/java/org/apache/coyote/http2/Http2Parser.java
tomcat/trunk/java/org/apache/coyote/http2/Http2Protocol.java
tomcat/trunk/java/org/apache/coyote/http2/Http2UpgradeHandler.java
tomcat/trunk/java/org/apache/coyote/http2/LocalStrings.properties
tomcat/trunk/webapps/docs/config/http2.xml
Modified: tomcat/trunk/java/org/apache/coyote/http2/Constants.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/Constants.java?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/Constants.java (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/Constants.java Thu Oct 20 11:06:41 2016
@@ -23,4 +23,8 @@ public class Constants {
// Parsing
static final int DEFAULT_HEADER_READ_BUFFER_SIZE = 1024;
+
+ // Limits
+ static final int DEFAULT_MAX_HEADER_COUNT = 100;
+ static final int DEFAULT_MAX_HEADER_SIZE = 8 * 1024;
}
Modified: tomcat/trunk/java/org/apache/coyote/http2/HpackDecoder.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/HpackDecoder.java?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/HpackDecoder.java (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/HpackDecoder.java Thu Oct 20 11:06:41 2016
@@ -61,6 +61,13 @@ public class HpackDecoder {
*/
private int maxMemorySize;
+ private int maxHeaderCount = Constants.DEFAULT_MAX_HEADER_COUNT;
+ private int maxHeaderSize = Constants.DEFAULT_MAX_HEADER_SIZE;
+
+ private volatile int headerCount = 0;
+ private volatile boolean countedCookie;
+ private volatile int headerSize = 0;
+
private final StringBuilder stringBuilder = new StringBuilder();
HpackDecoder(int maxMemorySize) {
@@ -109,7 +116,7 @@ public class HpackDecoder {
buffer.position(originalPos);
return;
}
- headerEmitter.emitHeader(headerName, headerValue);
+ emitHeader(headerName, headerValue);
addEntryToHeaderTable(new Hpack.HeaderField(headerName, headerValue));
} else if ((b & 0b11110000) == 0) {
//Literal Header Field without Indexing
@@ -123,7 +130,7 @@ public class HpackDecoder {
buffer.position(originalPos);
return;
}
- headerEmitter.emitHeader(headerName, headerValue);
+ emitHeader(headerName, headerValue);
} else if ((b & 0b11110000) == 0b00010000) {
//Literal Header Field never indexed
String headerName = readHeaderName(buffer, 4);
@@ -136,7 +143,7 @@ public class HpackDecoder {
buffer.position(originalPos);
return;
}
- headerEmitter.emitHeader(headerName, headerValue);
+ emitHeader(headerName, headerValue);
} else if ((b & 0b11100000) == 0b00100000) {
//context update max table size change
if (!handleMaxMemorySizeChange(buffer, originalPos)) {
@@ -246,7 +253,7 @@ public class HpackDecoder {
} else {
int adjustedIndex = getRealIndex(index - Hpack.STATIC_TABLE_LENGTH);
Hpack.HeaderField headerField = headerTable[adjustedIndex];
- headerEmitter.emitHeader(headerField.name, headerField.value);
+ emitHeader(headerField.name, headerField.value);
}
}
@@ -273,7 +280,7 @@ public class HpackDecoder {
if (entry.value == null) {
throw new HpackException();
}
- headerEmitter.emitHeader(entry.name, entry.value);
+ emitHeader(entry.name, entry.value);
}
private void addEntryToHeaderTable(Hpack.HeaderField entry) {
@@ -354,10 +361,74 @@ public class HpackDecoder {
return headerEmitter;
}
+
void setHeaderEmitter(HeaderEmitter headerEmitter) {
this.headerEmitter = headerEmitter;
+ // Reset limit tracking
+ headerCount = 0;
+ countedCookie = false;
+ headerSize = 0;
+ }
+
+
+ void setMaxHeaderCount(int maxHeaderCount) {
+ this.maxHeaderCount = maxHeaderCount;
+ }
+
+
+ void setMaxHeaderSize(int maxHeaderSize) {
+ this.maxHeaderSize = maxHeaderSize;
+ }
+
+
+ private void emitHeader(String name, String value) {
+ // Header names are forced to lower case
+ if ("cookie".equals(name)) {
+ // Only count the cookie header once since HTTP/2 splits it into
+ // multiple headers to aid compression
+ if (!countedCookie) {
+ headerCount ++;
+ countedCookie = true;
+ }
+ } else {
+ headerCount ++;
+ }
+ // Overhead will vary. The main concern is that lots of small headers
+ // trigger the limiting mechanism correctly. Therefore, use an overhead
+ // estimate of 3 which is the worst case for small headers.
+ int inc = 3 + name.length() + value.length();
+ headerSize += inc;
+ if (!isHeaderCountExceeded() && !isHeaderSizeExceeded(0)) {
+ headerEmitter.emitHeader(name, value);
+ }
}
+
+ boolean isHeaderCountExceeded() {
+ if (maxHeaderCount < 0) {
+ return false;
+ }
+ return headerCount > maxHeaderCount;
+ }
+
+
+ boolean isHeaderSizeExceeded(int unreadSize) {
+ if (maxHeaderSize < 0) {
+ return false;
+ }
+ return (headerSize + unreadSize) > maxHeaderSize;
+ }
+
+
+ boolean isHeaderSwallowSizeExceeded(int unreadSize) {
+ if (maxHeaderSize < 0) {
+ return false;
+ }
+ // Swallow the same again before closing the connection.
+ return (headerSize + unreadSize) > (2 * maxHeaderSize);
+ }
+
+
//package private fields for unit tests
int getFirstSlotPosition() {
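Note: the accounting added above charges every emitted header an estimated 3 bytes of overhead plus the uncompressed name and value lengths, and counts the (possibly split) cookie header only once. The following standalone sketch is not part of the patch and uses made-up names; it simply mirrors that per-header charge so the numbers used in the tests and documentation below are easy to reproduce:

    // Illustrative only: mirrors the size/count charge applied by HpackDecoder.emitHeader().
    public class HeaderChargeSketch {
        public static void main(String[] args) {
            String[][] headers = {
                    { ":method", "GET" },
                    { ":path", "/simple" },
                    { "x-tomcattest0", "0123456789abcdef0123456789abcdef" } // 32-char value
            };
            int count = 0;
            int size = 0;
            for (String[] h : headers) {
                count++;                                    // a cookie header would only count once
                size += 3 + h[0].length() + h[1].length();  // 3-byte worst-case overhead per header
            }
            // Prints: count=3 size=76 -> (3+7+3) + (3+5+7) + (3+13+32)
            System.out.println("count=" + count + " size=" + size);
        }
    }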
Modified: tomcat/trunk/java/org/apache/coyote/http2/Http2Parser.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/Http2Parser.java?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/Http2Parser.java (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/Http2Parser.java Thu Oct 20 11:06:41 2016
@@ -45,7 +45,7 @@ class Http2Parser {
ByteBuffer.allocate(Constants.DEFAULT_HEADER_READ_BUFFER_SIZE);
private volatile int headersCurrentStream = -1;
private volatile boolean headersEndStream = false;
-
+ private volatile boolean streamReset = false;
Http2Parser(String connectionId, Input input, Output output) {
this.connectionId = connectionId;
@@ -243,7 +243,7 @@ class Http2Parser {
payloadSize -= padLength;
}
- readHeaderPayload(payloadSize);
+ readHeaderPayload(streamId, payloadSize);
swallow(streamId, padLength, true);
@@ -371,7 +371,7 @@ class Http2Parser {
Integer.toString(streamId)), Http2Error.PROTOCOL_ERROR);
}
- readHeaderPayload(payloadSize);
+ readHeaderPayload(streamId, payloadSize);
if (Flags.isEndOfHeaders(flags)) {
onHeadersComplete(streamId);
@@ -380,7 +380,13 @@ class Http2Parser {
}
- private void readHeaderPayload(int payloadSize) throws Http2Exception, IOException {
+ private void readHeaderPayload(int streamId, int payloadSize)
+ throws Http2Exception, IOException {
+
+ if (log.isDebugEnabled()) {
+ log.debug(sm.getString("http2Parser.processFrameHeaders.payload", connectionId,
+ Integer.valueOf(streamId), Integer.valueOf(payloadSize)));
+ }
int remaining = payloadSize;
@@ -411,9 +417,27 @@ class Http2Parser {
sm.getString("http2Parser.processFrameHeaders.decodingFailed"),
Http2Error.COMPRESSION_ERROR);
}
+
// switches to write mode
headerReadBuffer.compact();
remaining -= toRead;
+
+ if (hpackDecoder.isHeaderCountExceeded() && !streamReset) {
+ streamReset = true;
+ throw new StreamException(sm.getString("http2Parser.headerLimitCount", connectionId,
+ Integer.valueOf(streamId)), Http2Error.ENHANCE_YOUR_CALM, streamId);
+ }
+
+ if (hpackDecoder.isHeaderSizeExceeded(headerReadBuffer.position()) && !streamReset) {
+ streamReset = true;
+ throw new StreamException(sm.getString("http2Parser.headerLimitSize", connectionId,
+ Integer.valueOf(streamId)), Http2Error.ENHANCE_YOUR_CALM, streamId);
+ }
+
+ if (hpackDecoder.isHeaderSwallowSizeExceeded(headerReadBuffer.position())) {
+ throw new ConnectionException(sm.getString("http2Parser.headerLimitSize",
+ connectionId, Integer.valueOf(streamId)), Http2Error.ENHANCE_YOUR_CALM);
+ }
}
hpackDecoder.getHeaderEmitter().validateHeaders();
@@ -439,6 +463,11 @@ class Http2Parser {
if (headerReadBuffer.capacity() > Constants.DEFAULT_HEADER_READ_BUFFER_SIZE) {
headerReadBuffer = ByteBuffer.allocate(Constants.DEFAULT_HEADER_READ_BUFFER_SIZE);
}
+
+ // Clear the 'stream has been reset' flag, if set
+ if (streamReset) {
+ streamReset = false;
+ }
}
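Note: the net effect of the new checks in readHeaderPayload() is a two-stage response. The first time a stream exceeds maxHeaderCount or maxHeaderSize it is reset with ENHANCE_YOUR_CALM (error code 0xB, hence the "RST-[11]" traces expected by the tests below) and the parser keeps decoding and discarding further header data for that stream; only once more than twice maxHeaderSize has been consumed does it give up on the whole connection. A rough standalone sketch of that decision, assuming the default 8 KiB limit (the class, enum and method names below are illustrative, not from the patch):

    // Illustrative only: maps a request's total decoded header size to the
    // expected outcome under the checks added to Http2Parser.readHeaderPayload().
    public class HeaderLimitOutcomeSketch {
        enum Outcome { OK, STREAM_RESET, CONNECTION_CLOSED }

        static Outcome outcome(int decodedHeaderSize, int maxHeaderSize) {
            if (maxHeaderSize < 0 || decodedHeaderSize <= maxHeaderSize) {
                return Outcome.OK;
            }
            if (decodedHeaderSize <= 2 * maxHeaderSize) {
                return Outcome.STREAM_RESET;      // RST_STREAM with ENHANCE_YOUR_CALM (0xB)
            }
            return Outcome.CONNECTION_CLOSED;     // swallow limit exceeded
        }

        public static void main(String[] args) {
            int max = 8 * 1024;
            System.out.println(outcome(12 * 1024, max));   // STREAM_RESET (cf. the 1x12k tests)
            System.out.println(outcome(32 * 1024, max));   // CONNECTION_CLOSED (cf. the 1x32k tests)
        }
    }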
Modified: tomcat/trunk/java/org/apache/coyote/http2/Http2Protocol.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/Http2Protocol.java?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/Http2Protocol.java (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/Http2Protocol.java Thu Oct 20 11:06:41 2016
@@ -61,8 +61,11 @@ public class Http2Protocol implements Up
// If a lower initial value is required, set it here but DO NOT change the
// default defined above.
private int initialWindowSize = DEFAULT_INITIAL_WINDOW_SIZE;
+ // Limits
private Set<String> allowedTrailerHeaders =
Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
+ private int maxHeaderCount = Constants.DEFAULT_MAX_HEADER_COUNT;
+ private int maxHeaderSize = Constants.DEFAULT_MAX_HEADER_SIZE;
@Override
@@ -104,6 +107,8 @@ public class Http2Protocol implements Up
result.setMaxConcurrentStreamExecution(getMaxConcurrentStreamExecution());
result.setInitialWindowSize(getInitialWindowSize());
result.setAllowedTrailerHeaders(allowedTrailerHeaders);
+ result.setMaxHeaderCount(getMaxHeaderCount());
+ result.setMaxHeaderSize(getMaxHeaderSize());
return result;
}
@@ -227,4 +232,24 @@ public class Http2Protocol implements Up
}
return result.toString();
}
+
+
+ public void setMaxHeaderCount(int maxHeaderCount) {
+ this.maxHeaderCount = maxHeaderCount;
+ }
+
+
+ public int getMaxHeaderCount() {
+ return maxHeaderCount;
+ }
+
+
+ public void setMaxHeaderSize(int maxHeaderSize) {
+ this.maxHeaderSize = maxHeaderSize;
+ }
+
+
+ public int getMaxHeaderSize() {
+ return maxHeaderSize;
+ }
}
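Note: the two new Http2Protocol properties are plain bean properties, so besides server.xml they can also be set programmatically when HTTP/2 is enabled on an embedded Tomcat connector. A minimal sketch follows; the embedded wiring (Tomcat, Connector, port and limit values) is illustrative and not part of this commit, only the two setters are:

    import org.apache.catalina.connector.Connector;
    import org.apache.catalina.startup.Tomcat;
    import org.apache.coyote.http2.Http2Protocol;

    public class EmbeddedHttp2Limits {
        public static void main(String[] args) throws Exception {
            Tomcat tomcat = new Tomcat();
            tomcat.setPort(8080);
            Connector connector = tomcat.getConnector();

            // Enable h2c upgrade on the connector and tighten the new header limits
            Http2Protocol http2 = new Http2Protocol();
            http2.setMaxHeaderCount(50);        // reject requests with more than 50 headers
            http2.setMaxHeaderSize(16 * 1024);  // allow up to 16 KiB of decoded header data
            connector.addUpgradeProtocol(http2);

            tomcat.start();
            tomcat.getServer().await();
        }
    }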
Modified: tomcat/trunk/java/org/apache/coyote/http2/Http2UpgradeHandler.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/Http2UpgradeHandler.java?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/Http2UpgradeHandler.java (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/Http2UpgradeHandler.java Thu Oct 20 11:06:41 2016
@@ -1113,6 +1113,16 @@ class Http2UpgradeHandler extends Abstra
}
+ public void setMaxHeaderCount(int maxHeaderCount) {
+ getHpackDecoder().setMaxHeaderCount(maxHeaderCount);
+ }
+
+
+ public void setMaxHeaderSize(int maxHeaderSize) {
+ getHpackDecoder().setMaxHeaderSize(maxHeaderSize);
+ }
+
+
// ----------------------------------------------- Http2Parser.Input methods
@Override
Modified: tomcat/trunk/java/org/apache/coyote/http2/LocalStrings.properties
URL: http://svn.apache.org/viewvc/tomcat/trunk/java/org/apache/coyote/http2/LocalStrings.properties?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/java/org/apache/coyote/http2/LocalStrings.properties (original)
+++ tomcat/trunk/java/org/apache/coyote/http2/LocalStrings.properties Thu Oct 20 11:06:41 2016
@@ -39,6 +39,8 @@ hpackEncoder.encodeHeader=Encoding heade
hpackhuffman.huffmanEncodedHpackValueDidNotEndWithEOS=Huffman encoded value in HPACK headers did not end with EOS padding
+http2Parser.headerLimitCount=Connection [{0}], Stream [{1}], Too many headers
+http2Parser.headerLimitSize=Connection [{0}], Stream [{1}], Total header size too big
http2Parser.headers.wrongFrameType=Connection [{0}], headers in progress for stream [{1}] but a frame of type [{2}] was received
http2Parser.headers.wrongStream=Connection [{0}], headers in progress for stream [{1}] but a frame for stream [{2}] was received
http2Parser.nonZeroPadding=Connection [{0}], Stream [{1}], Non-zero padding received
@@ -53,6 +55,7 @@ http2Parser.processFrameData.lengths=Con
http2Parser.processFrameGoaway.payloadTooSmall=Connection [{0}]: Goaway payload size was [{1}] which is less than the minimum 8
http2Parser.processFrameHeaders.decodingFailed=There was an error during the HPACK decoding of HTTP headers
http2Parser.processFrameHeaders.decodingDataLeft=Data left over after HPACK decoding - it should have been consumed
+http2Parser.processFrameHeaders.payload=Connection [{0}], Stream [{1}], Processing headers payload size [{2}]
http2Parser.processFramePing.invalidPayloadSize=Settings frame received with an invalid payload size of [{0}] (should be 8)
http2Parser.processFramePriority.invalidParent=Connection [{0}], Stream [{1}], A stream may not depend on itself
http2Parser.processFramePriority.invalidPayloadSize=Priority frame received with an invalid payload size of [{0}] (should be 5)
Added: tomcat/trunk/test/org/apache/coyote/http2/TestHttp2Limits.java
URL: http://svn.apache.org/viewvc/tomcat/trunk/test/org/apache/coyote/http2/TestHttp2Limits.java?rev=1765801&view=auto
==============================================================================
--- tomcat/trunk/test/org/apache/coyote/http2/TestHttp2Limits.java (added)
+++ tomcat/trunk/test/org/apache/coyote/http2/TestHttp2Limits.java Thu Oct 20 11:06:41 2016
@@ -0,0 +1,292 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.coyote.http2;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import org.apache.coyote.http2.HpackEncoder.State;
+import org.apache.tomcat.util.http.MimeHeaders;
+
+public class TestHttp2Limits extends Http2TestBase {
+
+ @Test
+ public void testHeaderLimits1x128() throws Exception {
+ // Well within limits
+ doTestHeaderLimits(1, 128, 0);
+ }
+
+
+ @Test
+ public void testHeaderLimits100x32() throws Exception {
+ // Just within default maxHeaderCount
+ // Note request has 3 standard headers
+ doTestHeaderLimits(97, 32, 0);
+ }
+
+
+ @Test
+ public void testHeaderLimits101x32() throws Exception {
+ // Just above default maxHeaderCount
+ doTestHeaderLimits(98, 32, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits20x32WithLimit10() throws Exception {
+ // Check lower count limit is enforced
+ doTestHeaderLimits(20, 32, -1, 10, Constants.DEFAULT_MAX_HEADER_SIZE, 0, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits8x1001() throws Exception {
+ // Just within default maxHttpHeaderSize
+ // per header overhead plus standard 2 headers
+ doTestHeaderLimits(8, 1001, 0);
+ }
+
+
+ @Test
+ public void testHeaderLimits8x1002() throws Exception {
+ // Just above default maxHttpHeaderSize
+ doTestHeaderLimits(8, 1002, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits3x1024WithLimit2048() throws Exception {
+ // Check lower size limit is enforced
+ doTestHeaderLimits(3, 1024, -1, Constants.DEFAULT_MAX_HEADER_COUNT, 2 * 1024, 0, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x12k() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 12*1024, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x12kin1kChunks() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 12*1024, 1024, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x12kin1kChunksThenNewRequest() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 12*1024, 1024, 1);
+
+ output.clearTrace();
+ sendSimpleGetRequest(5);
+ parser.readFrame(true);
+ parser.readFrame(true);
+ Assert.assertEquals(getSimpleResponseTrace(5), output.getTrace());
+ }
+
+
+ @Test
+ public void testHeaderLimits1x32k() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 32*1024, 1);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x32kin1kChunks() throws Exception {
+ // Bug 60232
+ // 500ms per frame write delay to give server a chance to process the
+ // stream reset and the connection reset before the request is fully
+ // sent.
+ doTestHeaderLimits(1, 32*1024, 1024, 500, 2);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x128k() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 128*1024, 2);
+ }
+
+
+ @Test
+ public void testHeaderLimits1x512k() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(1, 512*1024, 2);
+ }
+
+
+ @Test
+ public void testHeaderLimits10x512k() throws Exception {
+ // Bug 60232
+ doTestHeaderLimits(10, 512*1024, 2);
+ }
+
+
+ private void doTestHeaderLimits(int headerCount, int headerSize, int failMode) throws Exception {
+ doTestHeaderLimits(headerCount, headerSize, -1, failMode);
+ }
+
+
+ private void doTestHeaderLimits(int headerCount, int headerSize, int maxHeaderPayloadSize,
+ int failMode) throws Exception {
+ doTestHeaderLimits(headerCount, headerSize, maxHeaderPayloadSize, 0, failMode);
+ }
+
+
+ private void doTestHeaderLimits(int headerCount, int headerSize, int maxHeaderPayloadSize,
+ int delayms, int failMode) throws Exception {
+ doTestHeaderLimits(headerCount, headerSize, maxHeaderPayloadSize,
+ Constants.DEFAULT_MAX_HEADER_COUNT, Constants.DEFAULT_MAX_HEADER_SIZE, delayms,
+ failMode);
+ }
+
+
+ private void doTestHeaderLimits(int headerCount, int headerSize, int maxHeaderPayloadSize,
+ int maxHeaderCount, int maxHeaderSize, int delayms, int failMode) throws Exception {
+
+ // Build the custom headers
+ Map<String,String> customHeaders = new HashMap<>();
+ StringBuilder headerValue = new StringBuilder(headerSize);
+ // Does not need to be secure
+ Random r = new Random();
+ for (int i = 0; i < headerSize; i++) {
+ // Random lower case characters
+ headerValue.append((char) ('a' + r.nextInt(26)));
+ }
+ String v = headerValue.toString();
+ for (int i = 0; i < headerCount; i++) {
+ customHeaders.put("X-TomcatTest" + i, v);
+ }
+
+ enableHttp2();
+
+ Http2Protocol http2Protocol =
+ (Http2Protocol) getTomcatInstance().getConnector().findUpgradeProtocols()[0];
+ http2Protocol.setMaxHeaderCount(maxHeaderCount);
+ http2Protocol.setMaxHeaderSize(maxHeaderSize);
+
+ configureAndStartWebApplication();
+ openClientConnection();
+ doHttpUpgrade();
+ sendClientPreface();
+ validateHttp2InitialResponse();
+
+ if (maxHeaderPayloadSize == -1) {
+ maxHeaderPayloadSize = output.getMaxFrameSize();
+ }
+
+ // Build the simple request
+ byte[] frameHeader = new byte[9];
+ // Assumes at least one custom header and that all headers are the same
+ // length. These assumptions are valid for these tests.
+ ByteBuffer headersPayload = ByteBuffer.allocate(200 + (int) (customHeaders.size() *
+ customHeaders.values().iterator().next().length() * 1.2));
+
+ populateHeadersPayload(headersPayload, customHeaders);
+
+ Exception e = null;
+ try {
+ int written = 0;
+ int left = headersPayload.limit() - written;
+ while (left > 0) {
+ int thisTime = Math.min(left, maxHeaderPayloadSize);
+ populateFrameHeader(frameHeader, written, left, thisTime, 3);
+ writeFrame(frameHeader, headersPayload, headersPayload.limit() - left,
+ thisTime, delayms);
+ left -= thisTime;
+ written += thisTime;
+ }
+ } catch (IOException ioe) {
+ e = ioe;
+ }
+
+ switch (failMode) {
+ case 0: {
+ // Expect a normal response
+ readSimpleGetResponse();
+ Assert.assertEquals(getSimpleResponseTrace(3), output.getTrace());
+ Assert.assertNull(e);
+ break;
+ }
+ case 1: {
+ // Expect a stream reset
+ parser.readFrame(true);
+ Assert.assertEquals("3-RST-[11]\n", output.getTrace());
+ Assert.assertNull(e);
+ break;
+ }
+ case 2: {
+ // Expect an IOException caused by a connection reset
+ Assert.assertNotNull(e);
+ break;
+ }
+ default: {
+ Assert.fail("Unknown failure mode");
+ }
+ }
+ }
+
+
+ private void populateHeadersPayload(ByteBuffer headersPayload, Map<String,String> customHeaders)
+ throws Exception {
+ MimeHeaders headers = new MimeHeaders();
+ headers.addValue(":method").setString("GET");
+ headers.addValue(":path").setString("/simple");
+ headers.addValue(":authority").setString("localhost:" + getPort());
+ for (Entry<String,String> customHeader : customHeaders.entrySet()) {
+ headers.addValue(customHeader.getKey()).setString(customHeader.getValue());
+ }
+ State state = hpackEncoder.encode(headers, headersPayload);
+ if (state != State.COMPLETE) {
+ throw new Exception("Unable to build headers");
+ }
+ headersPayload.flip();
+
+ log.debug("Headers payload generated of size [" + headersPayload.limit() + "]");
+ }
+
+
+ private void populateFrameHeader(byte[] frameHeader, int written, int left, int thisTime,
+ int streamId) throws Exception {
+ ByteUtil.setThreeBytes(frameHeader, 0, thisTime);
+ if (written == 0) {
+ frameHeader[3] = FrameType.HEADERS.getIdByte();
+ // Flags. End of stream
+ frameHeader[4] = 0x01;
+ } else {
+ frameHeader[3] = FrameType.CONTINUATION.getIdByte();
+ }
+ if (left == thisTime) {
+ // Flags. End of headers
+ frameHeader[4] = (byte) (frameHeader[4] + 0x04);
+ }
+
+ // Stream id
+ ByteUtil.set31Bits(frameHeader, 5, streamId);
+ }
+}
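Note: for readers unfamiliar with the wire format assembled in populateFrameHeader(), an HTTP/2 frame header is 9 bytes: a 24-bit payload length, an 8-bit type (HEADERS is 0x1, CONTINUATION is 0x9), an 8-bit flags field (END_STREAM = 0x01, END_HEADERS = 0x04) and a 31-bit stream identifier. So a single 75-byte HEADERS frame carrying a complete request on stream 3 would start with the bytes 00 00 4B 01 05 00 00 00 03 (length 0x4B = 75, type HEADERS, flags END_STREAM | END_HEADERS = 0x05, stream 3). These byte values are illustrative, not taken from the tests.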
Modified: tomcat/trunk/webapps/docs/config/http2.xml
URL: http://svn.apache.org/viewvc/tomcat/trunk/webapps/docs/config/http2.xml?rev=1765801&r1=1765800&r2=1765801&view=diff
==============================================================================
--- tomcat/trunk/webapps/docs/config/http2.xml (original)
+++ tomcat/trunk/webapps/docs/config/http2.xml Thu Oct 20 11:06:41 2016
@@ -105,6 +105,23 @@
If not specified, the default value of <code>200</code> will be used.</p>
</attribute>
+ <attribute name="maxHeaderCount" required="false">
+ <p>The maximum number of headers in a request that is allowed by the
+ container. A request that contains more headers than the specified limit
+ will be rejected. A value of less than 0 means no limit.
+ If not specified, a default of 100 is used.</p>
+ </attribute>
+
+ <attribute name="maxHeaderSize" required="false">
+ <p>The maximum total size for all headers in a request that is allowed by
+ the container. Total size for a header is calculated as the uncompressed
+ size of the header name in bytes, plus the uncompressed size of the header
+ value in bytes plus an HTTP/2 overhead of 3 bytes per header. A request
+ that contains a set of headers that requires more than the specified limit
+ will be rejected. A value of less than 0 means no limit. If not specified,
+ a default of 8192 is used.</p>
+ </attribute>
+
<attribute name="readTimeout" required="false">
<p>The time, in milliseconds, that Tomcat will wait for additional data
when a partial HTTP/2 frame has been received. Negative values will be
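Note: working the documented maxHeaderSize formula through the boundary tests above: each custom header X-TomcatTest<n> has a 13-byte name, so with a 1001-byte value it is charged 3 + 13 + 1001 = 1017 bytes, and eight of them consume 8136 of the default 8192-byte budget. The three pseudo-headers (:method GET, :path /simple, :authority localhost:<port>) add roughly 56 more bytes assuming a five-digit test port, which is why 8 headers of 1001 bytes are just accepted while 8 headers of 1002 bytes (8 x 1018 = 8144 plus the pseudo-headers) push the total past 8192 and are rejected.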