[Git][java-team/netty][master] 2 commits: Fix test for Junit4 of CVE-2025-58057

Bastien Roucariès (@rouca) gitlab at salsa.debian.org
Tue Nov 25 22:10:06 GMT 2025



Bastien Roucariès pushed to branch master at Debian Java Maintainers / netty


Commits:
8f141aa1 by Bastien Roucariès at 2025-11-25T23:06:11+01:00
Fix test for Junit4 of CVE-2025-58057

- - - - -
39e98018 by Bastien Roucariès at 2025-11-25T23:09:11+01:00
Fix CVE-2025-58056

- - - - -


4 changed files:

- debian/changelog
- + debian/patches/CVE-2025-58056.patch
- debian/patches/CVE-2025-58057.patch
- debian/patches/series


Changes:

=====================================
debian/changelog
=====================================
@@ -1,3 +1,21 @@
+netty (1:4.1.48-13) unstable; urgency=high
+
+  * Team upload
+  * Fix test for junit4 for CVE-2025-58057 improving
+    backporting. Thanks to Edwin Jiang.
+  * Fix CVE-2025-58056 (Closes: #1113995)
+    Netty incorrectly accepts standalone newline
+    characters (LF) as a chunk-size line terminator,
+    regardless of a preceding carriage return (CR),
+    instead of requiring CRLF per HTTP/1.1 standards.
+    When combined with reverse proxies that parse LF
+    differently (treating it as part of the
+    chunk extension), attackers can craft requests
+    that the proxy sees as one request but Netty
+    processes as two, enabling request smuggling attacks. 
+
+ -- Bastien Roucariès <rouca at debian.org>  Tue, 25 Nov 2025 23:06:00 +0100
+
 netty (1:4.1.48-12) unstable; urgency=high
 
   * Team upload


=====================================
debian/patches/CVE-2025-58056.patch
=====================================
@@ -0,0 +1,724 @@
+From 39d3ecf8f0c57a7469ba927b2163d4cb4314b138 Mon Sep 17 00:00:00 2001
+From: Chris Vest <christianvest_hansen at apple.com>
+Date: Tue, 2 Sep 2025 23:25:09 -0700
+Subject: [PATCH] Merge commit from fork
+
+* Prevent HTTP request/response smuggling via chunk encoding
+
+Motivation:
+Transfer-Encoding: chunked has some strict rules around parsing CR LF delimiters.
+If we are too lenient, it can cause request/response smuggling issues when combined with proxies that are lenient in different ways.
+See https://w4ke.info/2025/06/18/funky-chunks.html for the details.
+
+Modification:
+- Make sure that we reject chunks with chunk-extensions that contain lone Line Feed octets without their preceding Carriage Return octet.
+- Make sure that we issue HttpContent objects with decoding failures, if we decode a chunk and it isn't immediately followed by a CR LF octet pair.
+
+Result:
+Smuggling requests/responses is no longer possible.
+
+Fixes https://github.com/netty/netty/issues/15522
+
+* Enforce CR LF line separators for HTTP messages by default
+
+But also make it configurable through `HttpDecoderConfig`, and add a system property opt-out to change the default back.
+
+* Remove property for the name of the strict line parsing property
+
+* Remove HeaderParser.parse overload that only takes a buffer argument
+
+Origin: backport, https://github.com/netty/netty/commit/39d3ecf8f0c57a7469ba927b2163d4cb4314b138
+
+[Ubuntu note: This patch uses a new constructor to configure strict line
+parsing (since HttpDecoderConfig.java does not exist), and uses a new field to
+pass strictCRLFCheck from HeaderParser.parse() to HeaderParser.process().
+-- Edwin Jiang <edwin.jiang at canonical.com>]
+---
+ codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectDecoder.java                |   71 +++-
+ codec-http/src/main/java/io/netty/handler/codec/http/HttpRequestDecoder.java               |    9 
+ codec-http/src/main/java/io/netty/handler/codec/http/HttpResponseDecoder.java              |    9 
+ codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkExtensionException.java   |   45 +++
+ codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkTerminationException.java |   45 +++
+ codec-http/src/main/java/io/netty/handler/codec/http/InvalidLineSeparatorException.java    |   48 +++
+ codec-http/src/test/java/io/netty/handler/codec/http/HttpRequestDecoderTest.java           |  148 ++++++++--
+ codec-http/src/test/java/io/netty/handler/codec/http/HttpResponseDecoderTest.java          |   62 ++++
+ 8 files changed, 403 insertions(+), 34 deletions(-)
+ create mode 100644 codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkExtensionException.java
+ create mode 100644 codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkTerminationException.java
+ create mode 100644 codec-http/src/main/java/io/netty/handler/codec/http/InvalidLineSeparatorException.java
+
+--- a/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectDecoder.java
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/HttpObjectDecoder.java
+@@ -27,6 +27,7 @@
+ import io.netty.handler.codec.TooLongFrameException;
+ import io.netty.util.ByteProcessor;
+ import io.netty.util.internal.AppendableCharSequence;
++import io.netty.util.internal.SystemPropertyUtil;
+ 
+ import java.util.List;
+ 
+@@ -104,11 +105,28 @@
+ public abstract class HttpObjectDecoder extends ByteToMessageDecoder {
+     public static final boolean DEFAULT_ALLOW_DUPLICATE_CONTENT_LENGTHS = false;
+     private static final String EMPTY_VALUE = "";
++    public static final boolean DEFAULT_STRICT_LINE_PARSING =
++            SystemPropertyUtil.getBoolean("io.netty.handler.codec.http.defaultStrictLineParsing", true);
++
++    private static final Runnable THROW_INVALID_CHUNK_EXTENSION = new Runnable() {
++        @Override
++        public void run() {
++            throw new InvalidChunkExtensionException();
++        }
++    };
++
++    private static final Runnable THROW_INVALID_LINE_SEPARATOR = new Runnable() {
++        @Override
++        public void run() {
++            throw new InvalidLineSeparatorException();
++        }
++    };
+ 
+     private final int maxChunkSize;
+     private final boolean chunkedSupported;
+     protected final boolean validateHeaders;
+     private final boolean allowDuplicateContentLengths;
++    private final Runnable defaultStrictCRLFCheck;
+     private final HeaderParser headerParser;
+     private final LineParser lineParser;
+ 
+@@ -180,6 +198,15 @@
+             int maxInitialLineLength, int maxHeaderSize, int maxChunkSize,
+             boolean chunkedSupported, boolean validateHeaders, int initialBufferSize,
+             boolean allowDuplicateContentLengths) {
++        this(maxInitialLineLength, maxHeaderSize, maxChunkSize, chunkedSupported,
++             validateHeaders, initialBufferSize, allowDuplicateContentLengths,
++             DEFAULT_STRICT_LINE_PARSING);
++    }
++
++    protected HttpObjectDecoder(
++            int maxInitialLineLength, int maxHeaderSize, int maxChunkSize,
++            boolean chunkedSupported, boolean validateHeaders, int initialBufferSize,
++            boolean allowDuplicateContentLengths, boolean strictLineParsing) {
+         checkPositive(maxInitialLineLength, "maxInitialLineLength");
+         checkPositive(maxHeaderSize, "maxHeaderSize");
+         checkPositive(maxChunkSize, "maxChunkSize");
+@@ -191,6 +218,7 @@
+         this.chunkedSupported = chunkedSupported;
+         this.validateHeaders = validateHeaders;
+         this.allowDuplicateContentLengths = allowDuplicateContentLengths;
++        defaultStrictCRLFCheck = strictLineParsing ? THROW_INVALID_LINE_SEPARATOR : null;
+     }
+ 
+     @Override
+@@ -203,7 +231,7 @@
+         case SKIP_CONTROL_CHARS:
+             // Fall-through
+         case READ_INITIAL: try {
+-            AppendableCharSequence line = lineParser.parse(buffer);
++            AppendableCharSequence line = lineParser.parse(buffer, defaultStrictCRLFCheck);
+             if (line == null) {
+                 return;
+             }
+@@ -313,11 +341,11 @@
+             return;
+         }
+         /**
+-         * everything else after this point takes care of reading chunked content. basically, read chunk size,
++         * Everything else after this point takes care of reading chunked content. Basically, read chunk size,
+          * read chunk, read and ignore the CRLF and repeat until 0
+          */
+         case READ_CHUNK_SIZE: try {
+-            AppendableCharSequence line = lineParser.parse(buffer);
++            AppendableCharSequence line = lineParser.parse(buffer, THROW_INVALID_CHUNK_EXTENSION);
+             if (line == null) {
+                 return;
+             }
+@@ -352,16 +380,16 @@
+             // fall-through
+         }
+         case READ_CHUNK_DELIMITER: {
+-            final int wIdx = buffer.writerIndex();
+-            int rIdx = buffer.readerIndex();
+-            while (wIdx > rIdx) {
+-                byte next = buffer.getByte(rIdx++);
+-                if (next == HttpConstants.LF) {
++            if (buffer.readableBytes() >= 2) {
++                int rIdx = buffer.readerIndex();
++                if (buffer.getByte(rIdx) == HttpConstants.CR &&
++                        buffer.getByte(rIdx + 1) == HttpConstants.LF) {
++                    buffer.skipBytes(2);
+                     currentState = State.READ_CHUNK_SIZE;
+-                    break;
++                } else {
++                    out.add(invalidChunk(buffer, new InvalidChunkTerminationException()));
+                 }
+             }
+-            buffer.readerIndex(rIdx);
+             return;
+         }
+         case READ_CHUNK_FOOTER: try {
+@@ -560,7 +588,7 @@
+         final HttpMessage message = this.message;
+         final HttpHeaders headers = message.headers();
+ 
+-        AppendableCharSequence line = headerParser.parse(buffer);
++        AppendableCharSequence line = headerParser.parse(buffer, defaultStrictCRLFCheck);
+         if (line == null) {
+             return null;
+         }
+@@ -580,7 +608,7 @@
+                     splitHeader(line);
+                 }
+ 
+-                line = headerParser.parse(buffer);
++                line = headerParser.parse(buffer, defaultStrictCRLFCheck);
+                 if (line == null) {
+                     return null;
+                 }
+@@ -661,7 +689,7 @@
+     }
+ 
+     private LastHttpContent readTrailingHeaders(ByteBuf buffer) {
+-        AppendableCharSequence line = headerParser.parse(buffer);
++        AppendableCharSequence line = headerParser.parse(buffer, defaultStrictCRLFCheck);
+         if (line == null) {
+             return null;
+         }
+@@ -701,7 +729,7 @@
+                 name = null;
+                 value = null;
+             }
+-            line = headerParser.parse(buffer);
++            line = headerParser.parse(buffer, defaultStrictCRLFCheck);
+             if (line == null) {
+                 return null;
+             }
+@@ -865,14 +893,19 @@
+         private final int maxLength;
+         private int size;
+ 
++        private Runnable strictCRLFCheck;
++
+         HeaderParser(AppendableCharSequence seq, int maxLength) {
+             this.seq = seq;
+             this.maxLength = maxLength;
+         }
+ 
+-        public AppendableCharSequence parse(ByteBuf buffer) {
++        public AppendableCharSequence parse(ByteBuf buffer, Runnable strictCRLFCheck) {
+             final int oldSize = size;
+             seq.reset();
++
++            this.strictCRLFCheck = strictCRLFCheck;
++
+             int i = buffer.forEachByte(this);
+             if (i == -1) {
+                 size = oldSize;
+@@ -895,6 +928,10 @@
+                 if (len >= 1 && seq.charAtUnsafe(len - 1) == HttpConstants.CR) {
+                     -- size;
+                     seq.setLength(len - 1);
++                } else {
++                    if (strictCRLFCheck != null) {
++                        strictCRLFCheck.run();
++                    }
+                 }
+                 return false;
+             }
+@@ -927,9 +964,9 @@
+         }
+ 
+         @Override
+-        public AppendableCharSequence parse(ByteBuf buffer) {
++        public AppendableCharSequence parse(ByteBuf buffer, Runnable strictCRLFCheck) {
+             reset();
+-            return super.parse(buffer);
++            return super.parse(buffer, strictCRLFCheck);
+         }
+ 
+         @Override
+--- /dev/null
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkExtensionException.java
+@@ -0,0 +1,45 @@
++/*
++ * Copyright 2025 The Netty Project
++ *
++ * The Netty Project licenses this file to you under the Apache License,
++ * version 2.0 (the "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at:
++ *
++ *   https://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
++ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
++ * License for the specific language governing permissions and limitations
++ * under the License.
++ */
++package io.netty.handler.codec.http;
++
++import io.netty.handler.codec.CorruptedFrameException;
++
++/**
++ * Thrown when HTTP chunk extensions could not be parsed, typically due to incorrect use of CR LF delimiters.
++ * <p>
++ * <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-chunked-transfer-coding">RFC 9112</a>
++ * specifies that chunk header lines must be terminated in a CR LF pair,
++ * and that a lone LF octet is not allowed within the chunk header line.
++ */
++public final class InvalidChunkExtensionException extends CorruptedFrameException {
++    private static final long serialVersionUID = 536224937231200736L;
++
++    public InvalidChunkExtensionException() {
++        super("Line Feed must be preceded by Carriage Return when terminating HTTP chunk header lines");
++    }
++
++    public InvalidChunkExtensionException(String message, Throwable cause) {
++        super(message, cause);
++    }
++
++    public InvalidChunkExtensionException(String message) {
++        super(message);
++    }
++
++    public InvalidChunkExtensionException(Throwable cause) {
++        super(cause);
++    }
++}
+--- /dev/null
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/InvalidChunkTerminationException.java
+@@ -0,0 +1,45 @@
++/*
++ * Copyright 2025 The Netty Project
++ *
++ * The Netty Project licenses this file to you under the Apache License,
++ * version 2.0 (the "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at:
++ *
++ *   https://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
++ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
++ * License for the specific language governing permissions and limitations
++ * under the License.
++ */
++package io.netty.handler.codec.http;
++
++import io.netty.handler.codec.CorruptedFrameException;
++
++/**
++ * Thrown when HTTP chunks could not be parsed, typically due to incorrect use of CR LF delimiters.
++ * <p>
++ * <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-chunked-transfer-coding">RFC 9112</a>
++ * specifies that chunk bodies must be terminated in a CR LF pair,
++ * and that the delimiter must follow the given chunk-size number of octets in chunk-data.
++ */
++public final class InvalidChunkTerminationException extends CorruptedFrameException {
++    private static final long serialVersionUID = 536224937231200736L;
++
++    public InvalidChunkTerminationException() {
++        super("Chunk data sections must be terminated by a CR LF octet pair");
++    }
++
++    public InvalidChunkTerminationException(String message, Throwable cause) {
++        super(message, cause);
++    }
++
++    public InvalidChunkTerminationException(String message) {
++        super(message);
++    }
++
++    public InvalidChunkTerminationException(Throwable cause) {
++        super(cause);
++    }
++}
+--- /dev/null
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/InvalidLineSeparatorException.java
+@@ -0,0 +1,48 @@
++/*
++ * Copyright 2025 The Netty Project
++ *
++ * The Netty Project licenses this file to you under the Apache License,
++ * version 2.0 (the "License"); you may not use this file except in compliance
++ * with the License. You may obtain a copy of the License at:
++ *
++ *   https://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
++ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
++ * License for the specific language governing permissions and limitations
++ * under the License.
++ */
++package io.netty.handler.codec.http;
++
++import io.netty.handler.codec.DecoderException;
++
++/**
++ * Thrown when strict line parsing is enabled, and HTTP start- and header
++ * field-lines are not separated by CR LF octet pairs.
++ * <p>
++ * Strict line parsing is enabled by default in this backport. The default can
++ * be overridden by setting the {@code io.netty.handler.codec.http.defaultStrictLineParsing}
++ * system property to {@code false}.
++ * <p>
++ * See <a href="https://datatracker.ietf.org/doc/html/rfc9112#name-message-format">RFC 9112 Section 2.1</a>.
++ */
++public final class InvalidLineSeparatorException extends DecoderException {
++    private static final long serialVersionUID = 536224937231200736L;
++
++    public InvalidLineSeparatorException() {
++        super("Line Feed must be preceded by Carriage Return when terminating HTTP start- and header field-lines");
++    }
++
++    public InvalidLineSeparatorException(String message, Throwable cause) {
++        super(message, cause);
++    }
++
++    public InvalidLineSeparatorException(String message) {
++        super(message);
++    }
++
++    public InvalidLineSeparatorException(Throwable cause) {
++        super(cause);
++    }
++}
+--- a/codec-http/src/test/java/io/netty/handler/codec/http/HttpRequestDecoderTest.java
++++ b/codec-http/src/test/java/io/netty/handler/codec/http/HttpRequestDecoderTest.java
+@@ -19,6 +19,7 @@
+ import io.netty.buffer.Unpooled;
+ import io.netty.channel.embedded.EmbeddedChannel;
+ import io.netty.handler.codec.TooLongFrameException;
++import io.netty.handler.codec.DecoderResult;
+ import io.netty.util.AsciiString;
+ import io.netty.util.CharsetUtil;
+ import org.junit.Test;
+@@ -43,6 +44,12 @@
+     private static final byte[] CONTENT_MIXED_DELIMITERS = createContent("\r\n", "\n");
+     private static final int CONTENT_LENGTH = 8;
+ 
++    private static final int DEFAULT_MAX_INITIAL_LINE_LENGTH = 4096;
++    private static final int DEFAULT_MAX_HEADER_SIZE = 8192;
++    private static final int DEFAULT_MAX_CHUNK_SIZE = 8192;
++    private static final boolean DEFAULT_VALIDATE_HEADERS = true;
++    private static final int DEFAULT_INITIAL_BUFFER_SIZE = 128;
++
+     private static byte[] createContent(String... lineDelimiters) {
+         String lineDelimiter;
+         String lineDelimiter2;
+@@ -80,18 +87,45 @@
+         testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS);
+     }
+ 
++    @Test
++    public void testDecodeWholeRequestAtOnceFailsWithLFDelimiters() {
++        testDecodeWholeRequestAtOnce(CONTENT_LF_DELIMITERS, DEFAULT_MAX_HEADER_SIZE, true, true);
++    }
++
++    @Test
++    public void testDecodeWholeRequestAtOnceFailsWithMixedDelimiters() {
++        testDecodeWholeRequestAtOnce(CONTENT_MIXED_DELIMITERS, DEFAULT_MAX_HEADER_SIZE, true, true);
++    }
++
+     private static void testDecodeWholeRequestAtOnce(byte[] content) {
+-        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
++        testDecodeWholeRequestAtOnce(content, DEFAULT_MAX_HEADER_SIZE, false, false);
++    }
++
++    private static void testDecodeWholeRequestAtOnce(byte[] content, int maxHeaderSize, boolean strictLineParsing,
++                                                     boolean expectFailure) {
++        EmbeddedChannel channel =
++                new EmbeddedChannel(new HttpRequestDecoder(DEFAULT_MAX_INITIAL_LINE_LENGTH,
++                                                           maxHeaderSize,
++                                                           DEFAULT_MAX_CHUNK_SIZE,
++                                                           DEFAULT_VALIDATE_HEADERS,
++                                                           DEFAULT_INITIAL_BUFFER_SIZE,
++                                                           strictLineParsing));
+         assertTrue(channel.writeInbound(Unpooled.copiedBuffer(content)));
+         HttpRequest req = channel.readInbound();
+         assertNotNull(req);
+-        checkHeaders(req.headers());
+-        LastHttpContent c = channel.readInbound();
+-        assertEquals(CONTENT_LENGTH, c.content().readableBytes());
+-        assertEquals(
+-                Unpooled.wrappedBuffer(content, content.length - CONTENT_LENGTH, CONTENT_LENGTH),
+-                c.content().readSlice(CONTENT_LENGTH));
+-        c.release();
++        if (expectFailure) {
++            assertTrue(req.decoderResult().isFailure());
++            assertThat(req.decoderResult().cause(), instanceOf(InvalidLineSeparatorException.class));
++        } else {
++            assertFalse(req.decoderResult().isFailure());
++            checkHeaders(req.headers());
++            LastHttpContent c = channel.readInbound();
++            assertEquals(CONTENT_LENGTH, c.content().readableBytes());
++            assertEquals(
++                    Unpooled.wrappedBuffer(content, content.length - CONTENT_LENGTH, CONTENT_LENGTH),
++                    c.content().readSlice(CONTENT_LENGTH));
++            c.release();
++        }
+ 
+         assertFalse(channel.finish());
+         assertNull(channel.readInbound());
+@@ -116,27 +150,45 @@
+ 
+     @Test
+     public void testDecodeWholeRequestInMultipleStepsCRLFDelimiters() {
+-        testDecodeWholeRequestInMultipleSteps(CONTENT_CRLF_DELIMITERS);
++        testDecodeWholeRequestInMultipleSteps(CONTENT_CRLF_DELIMITERS, true, false);
+     }
+ 
+     @Test
+     public void testDecodeWholeRequestInMultipleStepsLFDelimiters() {
+-        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS);
++        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS, false, false);
+     }
+ 
+     @Test
+     public void testDecodeWholeRequestInMultipleStepsMixedDelimiters() {
+-        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS);
++        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS, false, false);
+     }
+ 
+-    private static void testDecodeWholeRequestInMultipleSteps(byte[] content) {
++    @Test
++    public void testDecodeWholeRequestInMultipleStepsFailsWithLFDelimiters() {
++        testDecodeWholeRequestInMultipleSteps(CONTENT_LF_DELIMITERS, true, true);
++    }
++
++    @Test
++    public void testDecodeWholeRequestInMultipleStepsFailsWithMixedDelimiters() {
++        testDecodeWholeRequestInMultipleSteps(CONTENT_MIXED_DELIMITERS, true, true);
++    }
++
++    private static void testDecodeWholeRequestInMultipleSteps(
++            byte[] content, boolean strictLineParsing, boolean expectFailure) {
+         for (int i = 1; i < content.length; i++) {
+-            testDecodeWholeRequestInMultipleSteps(content, i);
++            testDecodeWholeRequestInMultipleSteps(content, i, strictLineParsing, expectFailure);
+         }
+     }
+ 
+-    private static void testDecodeWholeRequestInMultipleSteps(byte[] content, int fragmentSize) {
+-        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
++    private static void testDecodeWholeRequestInMultipleSteps(
++            byte[] content, int fragmentSize, boolean strictLineParsing, boolean expectFailure) {
++        EmbeddedChannel channel =
++                new EmbeddedChannel(new HttpRequestDecoder(DEFAULT_MAX_INITIAL_LINE_LENGTH,
++                                                           DEFAULT_MAX_HEADER_SIZE,
++                                                           DEFAULT_MAX_CHUNK_SIZE,
++                                                           DEFAULT_VALIDATE_HEADERS,
++                                                           DEFAULT_INITIAL_BUFFER_SIZE,
++                                                           strictLineParsing));
+         int headerLength = content.length - CONTENT_LENGTH;
+ 
+         // split up the header
+@@ -158,6 +210,12 @@
+ 
+         HttpRequest req = channel.readInbound();
+         assertNotNull(req);
++        if (expectFailure) {
++            assertTrue(req.decoderResult().isFailure());
++            assertThat(req.decoderResult().cause(), instanceOf(InvalidLineSeparatorException.class));
++            return; // No more messages will be produced.
++        }
++        assertFalse(req.decoderResult().isFailure());
+         checkHeaders(req.headers());
+ 
+         for (int i = CONTENT_LENGTH; i > 1; i --) {
+@@ -531,6 +589,66 @@
+     }
+ 
+     @Test
++    public void mustRejectImproperlyTerminatedChunkExtensions() throws Exception {
++        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
++        String requestStr = "GET /one HTTP/1.1\r\n" +
++                "Host: localhost\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "2;\n" + // Chunk size followed by illegal single newline (not preceded by carraige return)
++                "xx\r\n" +
++                "45\r\n" +
++                "0\r\n\r\n" +
++                "GET /two HTTP/1.1\r\n" +
++                "Host: localhost\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "0\r\n\r\n";
++        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
++        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
++        HttpRequest request = channel.readInbound();
++        assertFalse(request.decoderResult().isFailure()); // We parse the headers just fine.
++        assertTrue(request.headers().names().contains("Transfer-Encoding"));
++        assertTrue(request.headers().contains("Transfer-Encoding", "chunked", false));
++        HttpContent content = channel.readInbound();
++        DecoderResult decoderResult = content.decoderResult();
++        assertTrue(decoderResult.isFailure()); // But parsing the chunk must fail.
++        assertThat(decoderResult.cause(), instanceOf(InvalidChunkExtensionException.class));
++        content.release();
++        assertFalse(channel.finish());
++    }
++
++    @Test
++    public void mustRejectImproperlyTerminatedChunkBodies() throws Exception {
++        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
++        String requestStr = "GET /one HTTP/1.1\r\n" +
++                "Host: localhost\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "5\r\n" +
++                "AAAAAXX" + // Chunk body contains extra (XX) bytes, and no CRLF terminator.
++                "45\r\n" +
++                "0\r\n" +
++                "GET /two HTTP/1.1\r\n" +
++                "Host: localhost\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "0\r\n\r\n";
++        EmbeddedChannel channel = new EmbeddedChannel(new HttpRequestDecoder());
++        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
++        HttpRequest request = channel.readInbound();
++        assertFalse(request.decoderResult().isFailure()); // We parse the headers just fine.
++        assertTrue(request.headers().names().contains("Transfer-Encoding"));
++        assertTrue(request.headers().contains("Transfer-Encoding", "chunked", false));
++        HttpContent content = channel.readInbound();
++        assertFalse(content.decoderResult().isFailure()); // We parse the content promised by the chunk length.
++        content.release();
++
++        content = channel.readInbound();
++        DecoderResult decoderResult = content.decoderResult();
++        assertTrue(decoderResult.isFailure()); // But then parsing the chunk delimiter must fail.
++        assertThat(decoderResult.cause(), instanceOf(InvalidChunkTerminationException.class));
++        content.release();
++        assertFalse(channel.finish());
++    }
++
++    @Test
+     public void testContentLengthHeaderAndChunked() {
+         String requestStr = "POST / HTTP/1.1\r\n" +
+                 "Host: example.com\r\n" +
+--- a/codec-http/src/test/java/io/netty/handler/codec/http/HttpResponseDecoderTest.java
++++ b/codec-http/src/test/java/io/netty/handler/codec/http/HttpResponseDecoderTest.java
+@@ -18,6 +18,7 @@
+ import io.netty.buffer.ByteBuf;
+ import io.netty.buffer.Unpooled;
+ import io.netty.channel.embedded.EmbeddedChannel;
++import io.netty.handler.codec.DecoderResult;
+ import io.netty.handler.codec.PrematureChannelClosureException;
+ import io.netty.handler.codec.TooLongFrameException;
+ import io.netty.util.CharsetUtil;
+@@ -672,6 +673,63 @@
+     }
+ 
+     @Test
++    public void mustRejectImproperlyTerminatedChunkExtensions() throws Exception {
++        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
++        String requestStr = "HTTP/1.1 200 OK\r\n" +
++                "Transfer-Encoding: chunked\r\n" +
++                "\r\n" +
++                "2;\n" + // Chunk size followed by illegal single newline (not preceded by carraige return)
++                "xx\r\n" +
++                "1D\r\n" +
++                "0\r\n\r\n" +
++                "HTTP/1.1 200 OK\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "0\r\n\r\n";
++        EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
++        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
++        HttpResponse response = channel.readInbound();
++        assertFalse(response.decoderResult().isFailure()); // We parse the headers just fine.
++        assertTrue(response.headers().names().contains("Transfer-Encoding"));
++        assertTrue(response.headers().contains("Transfer-Encoding", "chunked", false));
++        HttpContent content = channel.readInbound();
++        DecoderResult decoderResult = content.decoderResult();
++        assertTrue(decoderResult.isFailure()); // But parsing the chunk must fail.
++        assertThat(decoderResult.cause(), instanceOf(InvalidChunkExtensionException.class));
++        content.release();
++        assertFalse(channel.finish());
++    }
++
++    @Test
++    public void mustRejectImproperlyTerminatedChunkBodies() throws Exception {
++        // See full explanation: https://w4ke.info/2025/06/18/funky-chunks.html
++        String requestStr = "HTTP/1.1 200 OK\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "5\r\n" +
++                "AAAAXX" + // Chunk body contains extra (XX) bytes, and no CRLF terminator.
++                "1D\r\n" +
++                "0\r\n" +
++                "HTTP/1.1 200 OK\r\n" +
++                "Transfer-Encoding: chunked\r\n\r\n" +
++                "0\r\n\r\n";
++        EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
++        assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
++        HttpResponse response = channel.readInbound();
++        assertFalse(response.decoderResult().isFailure()); // We parse the headers just fine.
++        assertTrue(response.headers().names().contains("Transfer-Encoding"));
++        assertTrue(response.headers().contains("Transfer-Encoding", "chunked", false));
++        HttpContent content = channel.readInbound();
++        assertFalse(content.decoderResult().isFailure()); // We parse the content promised by the chunk length.
++        content.release();
++
++        content = channel.readInbound();
++        DecoderResult decoderResult = content.decoderResult();
++        assertTrue(decoderResult.isFailure()); // But then parsing the chunk delimiter must fail.
++        assertThat(decoderResult.cause(), instanceOf(InvalidChunkTerminationException.class));
++        content.release();
++        assertFalse(channel.finish());
++    }
++
++    @Test
+     public void testConnectionClosedBeforeHeadersReceived() {
+         EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
+         String responseInitialLine =
+@@ -718,7 +776,7 @@
+         EmbeddedChannel channel = new EmbeddedChannel(new HttpResponseDecoder());
+         String requestStr = "HTTP/1.1 200 OK\r\n" +
+                 "Transfer-Encoding : chunked\r\n" +
+-                "Host: netty.io\n\r\n";
++                "Host: netty.io\r\n\r\n";
+ 
+         assertTrue(channel.writeInbound(Unpooled.copiedBuffer(requestStr, CharsetUtil.US_ASCII)));
+         HttpResponse response = channel.readInbound();
+@@ -787,7 +845,7 @@
+         testHeaderNameEndsWithControlChar(0x0c);
+     }
+ 
+-    private void testHeaderNameEndsWithControlChar(int controlChar) {
++    private static void testHeaderNameEndsWithControlChar(int controlChar) {
+         ByteBuf responseBuffer = Unpooled.buffer();
+         responseBuffer.writeCharSequence("HTTP/1.1 200 OK\r\n" +
+                 "Host: netty.io\r\n", CharsetUtil.US_ASCII);
+--- a/codec-http/src/main/java/io/netty/handler/codec/http/HttpRequestDecoder.java
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/HttpRequestDecoder.java
+@@ -81,6 +81,15 @@
+         super(maxInitialLineLength, maxHeaderSize, maxChunkSize, true, validateHeaders, initialBufferSize);
+     }
+ 
++    public HttpRequestDecoder(
++            int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean validateHeaders,
++            int initialBufferSize, boolean strictLineParsing) {
++        super(maxInitialLineLength, maxHeaderSize, maxChunkSize, true,
++              validateHeaders, initialBufferSize,
++              HttpObjectDecoder.DEFAULT_ALLOW_DUPLICATE_CONTENT_LENGTHS,
++              strictLineParsing);
++    }
++
+     @Override
+     protected HttpMessage createMessage(String[] initialLine) throws Exception {
+         return new DefaultHttpRequest(
+--- a/codec-http/src/main/java/io/netty/handler/codec/http/HttpResponseDecoder.java
++++ b/codec-http/src/main/java/io/netty/handler/codec/http/HttpResponseDecoder.java
+@@ -112,6 +112,15 @@
+         super(maxInitialLineLength, maxHeaderSize, maxChunkSize, true, validateHeaders, initialBufferSize);
+     }
+ 
++    public HttpResponseDecoder(
++            int maxInitialLineLength, int maxHeaderSize, int maxChunkSize, boolean validateHeaders,
++            int initialBufferSize, boolean strictLineParsing) {
++        super(maxInitialLineLength, maxHeaderSize, maxChunkSize, true,
++              validateHeaders, initialBufferSize,
++              HttpObjectDecoder.DEFAULT_ALLOW_DUPLICATE_CONTENT_LENGTHS,
++              strictLineParsing);
++    }
++
+     @Override
+     protected HttpMessage createMessage(String[] initialLine) {
+         return new DefaultHttpResponse(


=====================================
debian/patches/CVE-2025-58057.patch
=====================================
@@ -341,17 +341,16 @@ diff --git a/codec-http/src/test/java/io/netty/handler/codec/http/HttpContentDec
 index 4a659fa..f54e98d 100644
 --- a/codec-http/src/test/java/io/netty/handler/codec/http/HttpContentDecompressorTest.java
 +++ b/codec-http/src/test/java/io/netty/handler/codec/http/HttpContentDecompressorTest.java
-@@ -15,6 +15,9 @@
+@@ -15,6 +15,8 @@
   */
  package io.netty.handler.codec.http;
  
-+import io.netty.buffer.AdaptiveByteBufAllocator;
 +import io.netty.buffer.ByteBuf;
 +import io.netty.buffer.PooledByteBufAllocator;
  import io.netty.buffer.Unpooled;
  import io.netty.channel.ChannelHandlerContext;
  import io.netty.channel.ChannelInboundHandlerAdapter;
-@@ -23,6 +26,8 @@ import io.netty.channel.embedded.EmbeddedChannel;
+@@ -23,6 +25,8 @@
  import org.junit.Assert;
  import org.junit.Test;
  
@@ -360,22 +359,27 @@ index 4a659fa..f54e98d 100644
  import java.util.concurrent.atomic.AtomicInteger;
  
  public class HttpContentDecompressorTest {
-@@ -67,4 +72,87 @@ public class HttpContentDecompressorTest {
+@@ -67,4 +71,92 @@
          Assert.assertEquals(2, readCalled.get());
          Assert.assertFalse(channel.finishAndReleaseAll());
      }
 +
-+    static String[] encodings() {
-+        List<String> encodings = new ArrayList<String>();
-+        encodings.add("gzip");
-+        encodings.add("deflate");
-+        encodings.add("snappy");
-+        return encodings.toArray(new String[0]);
++    @Test
++    public void testZipBombGzip() {
++        testZipBomb("gzip");
++    }
++
++    @Test
++    public void testZipBombDeflate() {
++        testZipBomb("deflate");
++    }
++
++    @Test
++    public void testZipBombSnappy() {
++        testZipBomb("snappy");
 +    }
 +
-+    @ParameterizedTest
-+    @MethodSource("encodings")
-+    public void testZipBomb(String encoding) {
++    private static void testZipBomb(String encoding) {
 +        int chunkSize = 1024 * 1024;
 +        int numberOfChunks = 256;
 +        int memoryLimit = chunkSize * 128;
@@ -418,12 +422,12 @@ index 4a659fa..f54e98d 100644
 +        PooledByteBufAllocator allocator = new PooledByteBufAllocator(false);
 +
 +        ZipBombIncomingHandler incomingHandler = new ZipBombIncomingHandler(memoryLimit);
-+        EmbeddedChannel decompressChannel = new EmbeddedChannel(new HttpContentDecompressor(0), incomingHandler);
++        EmbeddedChannel decompressChannel = new EmbeddedChannel(new HttpContentDecompressor(), incomingHandler);
 +        decompressChannel.config().setAllocator(allocator);
 +        decompressChannel.writeInbound(message);
 +        decompressChannel.writeInbound(new DefaultLastHttpContent(compressed));
 +
-+        assertEquals((long) chunkSize * numberOfChunks, incomingHandler.total);
++        Assert.assertEquals((long) chunkSize * numberOfChunks, incomingHandler.total);
 +    }
 +
 +    private static final class ZipBombIncomingHandler extends ChannelInboundHandlerAdapter {
@@ -437,8 +441,8 @@ index 4a659fa..f54e98d 100644
 +        @Override
 +        public void channelRead(ChannelHandlerContext ctx, Object msg) {
 +            PooledByteBufAllocator allocator = (PooledByteBufAllocator) ctx.alloc();
-+            assertTrue(allocator.metric().usedHeapMemory() < memoryLimit);
-+            assertTrue(allocator.metric().usedDirectMemory() < memoryLimit);
++            Assert.assertTrue(allocator.metric().usedHeapMemory() < memoryLimit);
++            Assert.assertTrue(allocator.metric().usedDirectMemory() < memoryLimit);
 +
 +            if (msg instanceof HttpContent) {
 +                HttpContent buf = (HttpContent) msg;
@@ -849,7 +853,7 @@ diff --git a/codec/src/test/java/io/netty/handler/codec/compression/AbstractInte
 index 5eaed2f..dc05eb6 100644
 --- a/codec/src/test/java/io/netty/handler/codec/compression/AbstractIntegrationTest.java
 +++ b/codec/src/test/java/io/netty/handler/codec/compression/AbstractIntegrationTest.java
-@@ -17,7 +17,10 @@ package io.netty.handler.codec.compression;
+@@ -17,7 +17,10 @@
  
  import io.netty.buffer.ByteBuf;
  import io.netty.buffer.CompositeByteBuf;
@@ -860,7 +864,7 @@ index 5eaed2f..dc05eb6 100644
  import io.netty.channel.embedded.EmbeddedChannel;
  import io.netty.util.CharsetUtil;
  import io.netty.util.ReferenceCountUtil;
-@@ -166,4 +169,63 @@ public abstract class AbstractIntegrationTest {
+@@ -166,4 +169,63 @@
          decompressed.release();
          in.release();
      }
@@ -916,8 +920,8 @@ index 5eaed2f..dc05eb6 100644
 +            total += buf.readableBytes();
 +            try {
 +                PooledByteBufAllocator allocator = (PooledByteBufAllocator) ctx.alloc();
-+                assertThat(allocator.metric().usedHeapMemory()).isLessThan(memoryLimit);
-+                assertThat(allocator.metric().usedDirectMemory()).isLessThan(memoryLimit);
++                assertThat(allocator.metric().usedHeapMemory(), lessThan((long) memoryLimit));
++                assertThat(allocator.metric().usedDirectMemory(), lessThan((long) memoryLimit));
 +            } finally {
 +                buf.release();
 +            }


=====================================
debian/patches/series
=====================================
@@ -30,3 +30,4 @@ CVE-2025-55163_before-1.patch
 CVE-2025-55163_1.patch
 CVE-2025-55163_2.patch
 CVE-2025-58057.patch
+CVE-2025-58056.patch



View it on GitLab: https://salsa.debian.org/java-team/netty/-/compare/8c1e2f3cba8b38df63b0e3e5586e935e9eac0292...39e9801808d17496c05cf5a520d6674be2927466

-- 
View it on GitLab: https://salsa.debian.org/java-team/netty/-/compare/8c1e2f3cba8b38df63b0e3e5586e935e9eac0292...39e9801808d17496c05cf5a520d6674be2927466
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-java-commits/attachments/20251125/723fd565/attachment.htm>


More information about the pkg-java-commits mailing list