[Git][java-team/tomcat9][buster] 3 commits: Fixed CVE-2020-17527: HTTP/2 request header mix-up

Emmanuel Bourg gitlab at salsa.debian.org
Wed Jan 20 12:58:29 GMT 2021



Emmanuel Bourg pushed to branch buster at Debian Java Maintainers / tomcat9


Commits:
730e1d5d by Emmanuel Bourg at 2021-01-19T23:10:17+01:00
Fixed CVE-2020-17527: HTTP/2 request header mix-up

- - - - -
100be5b8 by Emmanuel Bourg at 2021-01-19T23:31:00+01:00
Fixed CVE-2020-13943: HTTP/2 request mix-up

- - - - -
aa604058 by Emmanuel Bourg at 2021-01-19T23:31:54+01:00
Upload to buster-security

- - - - -


4 changed files:

- debian/changelog
- + debian/patches/CVE-2020-13943.patch
- + debian/patches/CVE-2020-17527.patch
- debian/patches/series


Changes:

=====================================
debian/changelog
=====================================
@@ -1,3 +1,20 @@
+tomcat9 (9.0.31-1~deb10u3) buster-security; urgency=medium
+
+  * Fixed CVE-2020-13943: HTTP/2 request mix-up. If an HTTP/2 client exceeded
+    the agreed maximum number of concurrent streams for a connection (in
+    violation of the HTTP/2 protocol), it was possible that a subsequent
+    request made on that connection could contain HTTP headers - including
+    HTTP/2 pseudo headers - from a previous request rather than the intended
+    headers. This could lead to users seeing responses for unexpected resources.
+  * Fixed CVE-2020-17527: HTTP/2 request header mix-up. It was discovered that
+    Apache Tomcat could re-use an HTTP request header value from the previous
+    stream received on an HTTP/2 connection for the request associated with
+    the subsequent stream. While this would most likely lead to an error and
+    the closure of the HTTP/2 connection, it is possible that information could
+    leak between requests.
+
+ -- Emmanuel Bourg <ebourg at apache.org>  Tue, 19 Jan 2021 23:31:47 +0100
+
 tomcat9 (9.0.31-1~deb10u2) buster-security; urgency=high
 
   * Team upload.


=====================================
debian/patches/CVE-2020-13943.patch
=====================================
@@ -0,0 +1,109 @@
+From 55911430df13f8c9998fbdee1f9716994d2db59b Mon Sep 17 00:00:00 2001
+From: Mark Thomas <markt at apache.org>
+Date: Thu, 23 Jul 2020 17:43:45 +0100
+Subject: [PATCH] Move check for current streams to end of header parsing.
+
+---
+ java/org/apache/coyote/http2/Http2Parser.java |  2 +-
+ .../coyote/http2/Http2UpgradeHandler.java     | 24 ++++++++++---------
+ .../coyote/http2/TestHttp2Section_5_1.java    | 20 ++++++++++------
+ 3 files changed, 27 insertions(+), 19 deletions(-)
+
+--- a/java/org/apache/coyote/http2/Http2Parser.java
++++ b/java/org/apache/coyote/http2/Http2Parser.java
+@@ -737,7 +737,7 @@
+         HeaderEmitter headersStart(int streamId, boolean headersEndStream)
+                 throws Http2Exception, IOException;
+         void headersContinue(int payloadSize, boolean endOfHeaders);
+-        void headersEnd(int streamId) throws ConnectionException;
++        void headersEnd(int streamId) throws Http2Exception;
+ 
+         // Priority frames (also headers)
+         void reprioritise(int streamId, int parentStreamId, boolean exclusive, int weight)
+--- a/java/org/apache/coyote/http2/Http2UpgradeHandler.java
++++ b/java/org/apache/coyote/http2/Http2UpgradeHandler.java
+@@ -1451,16 +1451,6 @@
+             stream.checkState(FrameType.HEADERS);
+             stream.receivedStartOfHeaders(headersEndStream);
+             closeIdleStreams(streamId);
+-            if (localSettings.getMaxConcurrentStreams() < activeRemoteStreamCount.incrementAndGet()) {
+-                setConnectionTimeoutForStreamCount(activeRemoteStreamCount.decrementAndGet());
+-                // Ignoring maxConcurrentStreams increases the overhead count
+-                increaseOverheadCount();
+-                throw new StreamException(sm.getString("upgradeHandler.tooManyRemoteStreams",
+-                        Long.toString(localSettings.getMaxConcurrentStreams())),
+-                        Http2Error.REFUSED_STREAM, streamId);
+-            }
+-            // Valid new stream reduces the overhead count
+-            reduceOverheadCount();
+             return stream;
+         } else {
+             if (log.isDebugEnabled()) {
+@@ -1528,12 +1518,24 @@
+ 
+ 
+     @Override
+-    public void headersEnd(int streamId) throws ConnectionException {
++    public void headersEnd(int streamId) throws Http2Exception {
+         Stream stream = getStream(streamId, connectionState.get().isNewStreamAllowed());
+         if (stream != null) {
+             setMaxProcessedStream(streamId);
+             if (stream.isActive()) {
+                 if (stream.receivedEndOfHeaders()) {
++
++                    if (localSettings.getMaxConcurrentStreams() < activeRemoteStreamCount.incrementAndGet()) {
++                        setConnectionTimeoutForStreamCount(activeRemoteStreamCount.decrementAndGet());
++                        // Ignoring maxConcurrentStreams increases the overhead count
++                        increaseOverheadCount();
++                        throw new StreamException(sm.getString("upgradeHandler.tooManyRemoteStreams",
++                                Long.toString(localSettings.getMaxConcurrentStreams())),
++                                Http2Error.REFUSED_STREAM, streamId);
++                    }
++                    // Valid new stream reduces the overhead count
++                    reduceOverheadCount();
++
+                     processStreamOnContainerThread(stream);
+                 }
+             }
+--- a/test/org/apache/coyote/http2/TestHttp2Section_5_1.java
++++ b/test/org/apache/coyote/http2/TestHttp2Section_5_1.java
+@@ -222,11 +222,11 @@
+         // Expecting
+         // 1 * headers
+         // 56k-1 of body (7 * ~8k)
+-        // 1 * error (could be in any order)
+-        for (int i = 0; i < 8; i++) {
++        // 1 * error
++        // for a total of 9 frames (could be in any order)
++        for (int i = 0; i < 9; i++) {
+             parser.readFrame(true);
+         }
+-        parser.readFrame(true);
+ 
+         Assert.assertTrue(output.getTrace(),
+                 output.getTrace().contains("5-RST-[" +
+@@ -238,14 +238,20 @@
+ 
+         // Release the remaining body
+         sendWindowUpdate(0, (1 << 31) - 2);
+-        // Allow for the 8k still in the stream window
++        // Allow for the ~8k still in the stream window
+         sendWindowUpdate(3, (1 << 31) - 8193);
+ 
+-        // 192k of body (24 * 8k)
+-        // 1 * error (could be in any order)
+-        for (int i = 0; i < 24; i++) {
++        // Read until the end of stream 3
++        while (!output.getTrace().contains("3-EndOfStream")) {
+             parser.readFrame(true);
+         }
++        output.clearTrace();
++
++        // Confirm another request can be sent once concurrency falls back below limit
++        sendSimpleGetRequest(7);
++        parser.readFrame(true);
++        parser.readFrame(true);
++        Assert.assertEquals(getSimpleResponseTrace(7), output.getTrace());
+     }
+ 
+ 
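
The change above is essentially an ordering fix: the maxConcurrentStreams check now runs in headersEnd(), after the whole header block has been received, instead of when the headers start, so a refused stream can no longer leave the connection-wide header state only partially consumed. A rough, self-contained Java sketch of that pattern (all names here are hypothetical, not Tomcat's actual classes):

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Hypothetical illustration only; Tomcat's real logic lives in Http2UpgradeHandler.
    public class StreamLimitSketch {

        static final int MAX_CONCURRENT_STREAMS = 1;

        // Connection-wide header state: every header block must be consumed in
        // full to keep it in sync, even for streams that end up being refused.
        private final List<String> pendingHeaderBlock = new ArrayList<>();
        private int activeStreams = 0;
        private final Map<Integer, List<String>> received = new LinkedHashMap<>();

        void onHeaderFragment(int streamId, String header) {
            // Pre-fix behaviour checked the limit here, before the block was
            // complete, which could abandon a half-decoded block.
            pendingHeaderBlock.add(header);
        }

        void onHeadersEnd(int streamId) {
            // Post-fix behaviour: the block is complete, so refusing the stream
            // now cannot leave stale headers behind for the next stream.
            List<String> headers = new ArrayList<>(pendingHeaderBlock);
            pendingHeaderBlock.clear();
            if (++activeStreams > MAX_CONCURRENT_STREAMS) {
                activeStreams--;
                System.out.println("stream " + streamId + " refused (REFUSED_STREAM)");
                return;
            }
            received.put(streamId, headers);
        }

        void streamClosed() {
            activeStreams--;
        }

        public static void main(String[] args) {
            StreamLimitSketch conn = new StreamLimitSketch();
            conn.onHeaderFragment(1, ":path: /a");
            conn.onHeadersEnd(1);                  // accepted
            conn.onHeaderFragment(3, ":path: /b");
            conn.onHeadersEnd(3);                  // refused, headers still fully consumed
            conn.streamClosed();                   // stream 1 completes
            conn.onHeaderFragment(5, ":path: /c");
            conn.onHeadersEnd(5);                  // accepted, sees only its own headers
            System.out.println(conn.received);
        }
    }

The updated test exercises the same idea: after stream 3 reaches end of stream, a new request on stream 7 is accepted again because the active-stream count has dropped back under the limit.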


=====================================
debian/patches/CVE-2020-17527.patch
=====================================
@@ -0,0 +1,46 @@
+From d56293f816d6dc9e2b47107f208fa9e95db58c65 Mon Sep 17 00:00:00 2001
+From: Mark Thomas <markt at apache.org>
+Date: Mon, 9 Nov 2020 19:23:12 +0000
+Subject: [PATCH] Fix BZ 64830 - concurrency issue in HPACK decoder
+
+https://bz.apache.org/bugzilla/show_bug.cgi?id=64830
+---
+ java/org/apache/coyote/http2/HpackDecoder.java | 12 ++++--------
+ webapps/docs/changelog.xml                     |  3 +++
+ 2 files changed, 7 insertions(+), 8 deletions(-)
+
+--- a/java/org/apache/coyote/http2/HpackDecoder.java
++++ b/java/org/apache/coyote/http2/HpackDecoder.java
+@@ -72,8 +72,6 @@
+     private volatile boolean countedCookie;
+     private volatile int headerSize = 0;
+ 
+-    private final StringBuilder stringBuilder = new StringBuilder();
+-
+     HpackDecoder(int maxMemorySize) {
+         this.maxMemorySizeHard = maxMemorySize;
+         this.maxMemorySizeSoft = maxMemorySize;
+@@ -222,19 +220,17 @@
+         if (huffman) {
+             return readHuffmanString(length, buffer);
+         }
++        StringBuilder stringBuilder = new StringBuilder(length);
+         for (int i = 0; i < length; ++i) {
+             stringBuilder.append((char) buffer.get());
+         }
+-        String ret = stringBuilder.toString();
+-        stringBuilder.setLength(0);
+-        return ret;
++        return stringBuilder.toString();
+     }
+ 
+     private String readHuffmanString(int length, ByteBuffer buffer) throws HpackException {
++        StringBuilder stringBuilder = new StringBuilder(length);
+         HPackHuffman.decode(buffer, length, stringBuilder);
+-        String ret = stringBuilder.toString();
+-        stringBuilder.setLength(0);
+-        return ret;
++        return stringBuilder.toString();
+     }
+ 
+     private String handleIndexedHeaderName(int index) throws HpackException {
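
The fix above drops the decoder-wide stringBuilder field in favour of a StringBuilder created per call, so one header value can no longer be assembled on top of characters left over from another. A rough, self-contained Java sketch of the before/after pattern (hypothetical class, not Tomcat's HpackDecoder):

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    // Hypothetical illustration only; the real code is in org.apache.coyote.http2.HpackDecoder.
    public class HeaderStringSketch {

        // Pre-fix pattern: one builder shared by every read on this decoder.
        private final StringBuilder shared = new StringBuilder();

        String readSharedBuilder(ByteBuffer buffer, int length) {
            for (int i = 0; i < length; i++) {
                shared.append((char) buffer.get());
            }
            String value = shared.toString();
            // If another read interleaves before this reset, its value starts
            // with this call's characters.
            shared.setLength(0);
            return value;
        }

        // Post-fix pattern: each read builds its value in its own local buffer,
        // sized for the value being read.
        String readLocalBuilder(ByteBuffer buffer, int length) {
            StringBuilder local = new StringBuilder(length);
            for (int i = 0; i < length; i++) {
                local.append((char) buffer.get());
            }
            return local.toString();
        }

        public static void main(String[] args) {
            HeaderStringSketch decoder = new HeaderStringSketch();
            ByteBuffer buf = ByteBuffer.wrap("cookie".getBytes(StandardCharsets.ISO_8859_1));
            System.out.println(decoder.readLocalBuilder(buf, buf.remaining()));
        }
    }

Sizing the local builder with the known length also matches what the upstream change does with new StringBuilder(length).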


=====================================
debian/patches/series
=====================================
@@ -17,3 +17,5 @@ CVE-2020-13935.patch
 CVE-2020-11996.patch
 CVE-2020-9484.patch
 debian-bug-959937.patch
+CVE-2020-13943.patch
+CVE-2020-17527.patch



View it on GitLab: https://salsa.debian.org/java-team/tomcat9/-/compare/1ac6d044793df53915acd6abc7e454907cba1cc1...aa604058b5d7fdb317bd7bbb22e2f3cf0d990b77
