Version in base suite: 3.11.16-1 Base version: python-aiohttp_3.11.16-1 Target version: python-aiohttp_3.11.16-1+deb13u1 Base file: /srv/ftp-master.debian.org/ftp/pool/main/p/python-aiohttp/python-aiohttp_3.11.16-1.dsc Target file: /srv/ftp-master.debian.org/policy/pool/main/p/python-aiohttp/python-aiohttp_3.11.16-1+deb13u1.dsc changelog | 12 patches/CVE-2025-69223.patch | 804 +++++++++++++++++++++++++++++++++++++++++++ patches/CVE-2025-69224.patch | 113 ++++++ patches/CVE-2025-69225.patch | 33 + patches/CVE-2025-69226.patch | 110 +++++ patches/CVE-2025-69227.patch | 133 +++++++ patches/CVE-2025-69228.patch | 31 + patches/CVE-2025-69229.patch | 316 ++++++++++++++++ patches/series | 7 9 files changed, 1559 insertions(+) dpkg-source: warning: cannot verify inline signature for /srv/release.debian.org/tmp/tmpo08clwet/python-aiohttp_3.11.16-1.dsc: no acceptable signature found dpkg-source: warning: cannot verify inline signature for /srv/release.debian.org/tmp/tmpo08clwet/python-aiohttp_3.11.16-1+deb13u1.dsc: no acceptable signature found diff -Nru python-aiohttp-3.11.16/debian/changelog python-aiohttp-3.11.16/debian/changelog --- python-aiohttp-3.11.16/debian/changelog 2025-04-05 12:44:31.000000000 +0000 +++ python-aiohttp-3.11.16/debian/changelog 2026-04-21 19:00:45.000000000 +0000 @@ -1,3 +1,15 @@ +python-aiohttp (3.11.16-1+deb13u1) trixie-security; urgency=medium + + * CVE-2025-69223 + * CVE-2025-69224 + * CVE-2025-69225 + * CVE-2025-69226 + * CVE-2025-69227 + * CVE-2025-69228 + * CVE-2025-69229 + + -- Moritz Mühlenhoff Tue, 21 Apr 2026 21:00:45 +0200 + python-aiohttp (3.11.16-1) unstable; urgency=medium * New upstream release diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69223.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69223.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69223.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69223.patch 2026-04-21 18:53:30.000000000 +0000 @@ -0,0 +1,804 @@ +From 2b920c39002cee0ec5b402581779bbaaf7c9138a Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 15:56:02 +0000 +Subject: [PATCH] Use decompressor max_length parameter (#11898) (#11918) + +--- python-aiohttp-3.11.16.orig/aiohttp/compression_utils.py ++++ python-aiohttp-3.11.16/aiohttp/compression_utils.py +@@ -1,5 +1,6 @@ + import asyncio + import zlib ++from abc import ABC, abstractmethod + from concurrent.futures import Executor + from typing import Optional, cast + +@@ -13,7 +14,12 @@ try: + except ImportError: # pragma: no cover + HAS_BROTLI = False + +-MAX_SYNC_CHUNK_SIZE = 1024 ++MAX_SYNC_CHUNK_SIZE = 4096 ++DEFAULT_MAX_DECOMPRESS_SIZE = 2**25 # 32MiB ++ ++# Unlimited decompression constants - different libraries use different conventions ++ZLIB_MAX_LENGTH_UNLIMITED = 0 # zlib uses 0 to mean unlimited ++ZSTD_MAX_LENGTH_UNLIMITED = -1 # zstd uses -1 to mean unlimited + + + def encoding_to_mode( +@@ -26,19 +32,37 @@ def encoding_to_mode( + return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS + + +-class ZlibBaseHandler: ++class DecompressionBaseHandler(ABC): + def __init__( + self, +- mode: int, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): +- self._mode = mode ++ """Base class for decompression handlers.""" + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + ++ @abstractmethod ++ def decompress_sync( ++ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED ++ ) -> bytes: ++ """Decompress the given 
data.""" ++ ++ async def decompress( ++ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED ++ ) -> bytes: ++ """Decompress the given data.""" ++ if ( ++ self._max_sync_chunk_size is not None ++ and len(data) > self._max_sync_chunk_size ++ ): ++ return await asyncio.get_event_loop().run_in_executor( ++ self._executor, self.decompress_sync, data, max_length ++ ) ++ return self.decompress_sync(data, max_length) + +-class ZLibCompressor(ZlibBaseHandler): ++ ++class ZLibCompressor: + def __init__( + self, + encoding: Optional[str] = None, +@@ -49,14 +73,12 @@ class ZLibCompressor(ZlibBaseHandler): + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): +- super().__init__( +- mode=( +- encoding_to_mode(encoding, suppress_deflate_header) +- if wbits is None +- else wbits +- ), +- executor=executor, +- max_sync_chunk_size=max_sync_chunk_size, ++ self._executor = executor ++ self._max_sync_chunk_size = max_sync_chunk_size ++ self._mode = ( ++ encoding_to_mode(encoding, suppress_deflate_header) ++ if wbits is None ++ else wbits + ) + if level is None: + self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) +@@ -96,7 +118,7 @@ class ZLibCompressor(ZlibBaseHandler): + return self._compressor.flush(mode) + + +-class ZLibDecompressor(ZlibBaseHandler): ++class ZLibDecompressor(DecompressionBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, +@@ -104,32 +126,15 @@ class ZLibDecompressor(ZlibBaseHandler): + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): +- super().__init__( +- mode=encoding_to_mode(encoding, suppress_deflate_header), +- executor=executor, +- max_sync_chunk_size=max_sync_chunk_size, +- ) ++ super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) ++ self._mode = encoding_to_mode(encoding, suppress_deflate_header) + self._decompressor = zlib.decompressobj(wbits=self._mode) + +- def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: ++ def decompress_sync( ++ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED ++ ) -> bytes: + return self._decompressor.decompress(data, max_length) + +- async def decompress(self, data: bytes, max_length: int = 0) -> bytes: +- """Decompress the data and return the decompressed bytes. +- +- If the data size is large than the max_sync_chunk_size, the decompression +- will be done in the executor. Otherwise, the decompression will be done +- in the event loop. +- """ +- if ( +- self._max_sync_chunk_size is not None +- and len(data) > self._max_sync_chunk_size +- ): +- return await asyncio.get_running_loop().run_in_executor( +- self._executor, self._decompressor.decompress, data, max_length +- ) +- return self.decompress_sync(data, max_length) +- + def flush(self, length: int = 0) -> bytes: + return ( + self._decompressor.flush(length) +@@ -150,24 +155,34 @@ class ZLibDecompressor(ZlibBaseHandler): + return self._decompressor.unused_data + + +-class BrotliDecompressor: ++class BrotliDecompressor(DecompressionBaseHandler): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. 
The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' +- def __init__(self) -> None: ++ def __init__( ++ self, ++ executor: Optional[Executor] = None, ++ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, ++ ) -> None: ++ """Decompress data using the Brotli library.""" + if not HAS_BROTLI: + raise RuntimeError( + "The brotli decompression is not available. " + "Please install `Brotli` module" + ) + self._obj = brotli.Decompressor() ++ super().__init__(executor=executor, max_sync_chunk_size=max_sync_chunk_size) + +- def decompress_sync(self, data: bytes) -> bytes: ++ def decompress_sync( ++ self, data: bytes, max_length: int = ZLIB_MAX_LENGTH_UNLIMITED ++ ) -> bytes: ++ """Decompress the given data.""" + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: ++ """Flush the decompressor.""" + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" +--- python-aiohttp-3.11.16.orig/aiohttp/http_exceptions.py ++++ python-aiohttp-3.11.16/aiohttp/http_exceptions.py +@@ -74,6 +74,10 @@ class ContentLengthError(PayloadEncoding + """Not enough data for satisfy content length header.""" + + ++class DecompressSizeError(PayloadEncodingError): ++ """Decompressed size exceeds the configured limit.""" ++ ++ + class LineTooLong(BadHttpMessage): + def __init__( + self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" +--- python-aiohttp-3.11.16.orig/aiohttp/http_parser.py ++++ python-aiohttp-3.11.16/aiohttp/http_parser.py +@@ -26,7 +26,7 @@ from yarl import URL + + from . import hdrs + from .base_protocol import BaseProtocol +-from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor ++from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor, DEFAULT_MAX_DECOMPRESS_SIZE + from .helpers import ( + _EXC_SENTINEL, + DEBUG, +@@ -42,6 +42,7 @@ from .http_exceptions import ( + BadStatusLine, + ContentEncodingError, + ContentLengthError, ++ DecompressSizeError, + InvalidHeader, + InvalidURLError, + LineTooLong, +@@ -951,7 +952,12 @@ class DeflateBuffer: + + decompressor: Any + +- def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: ++ def __init__( ++ self, ++ out: StreamReader, ++ encoding: Optional[str], ++ max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, ++ ) -> None: + self.out = out + self.size = 0 + self.encoding = encoding +@@ -968,6 +974,8 @@ class DeflateBuffer: + else: + self.decompressor = ZLibDecompressor(encoding=encoding) + ++ self._max_decompress_size = max_decompress_size ++ + def set_exception( + self, + exc: BaseException, +@@ -996,7 +1004,10 @@ class DeflateBuffer: + ) + + try: +- chunk = self.decompressor.decompress_sync(chunk) ++ # Decompress with limit + 1 so we can detect if output exceeds limit ++ chunk = self.decompressor.decompress_sync( ++ chunk, max_length=self._max_decompress_size + 1 ++ ) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding +@@ -1004,6 +1015,13 @@ class DeflateBuffer: + + self._started_decoding = True + ++ # Check if decompression limit was exceeded ++ if len(chunk) > self._max_decompress_size: ++ raise DecompressSizeError( ++ "Decompressed data exceeds the configured limit of %d bytes" ++ % self._max_decompress_size ++ ) ++ + if chunk: + self.out.feed_data(chunk, len(chunk)) + +--- python-aiohttp-3.11.16.orig/aiohttp/multipart.py ++++ 
python-aiohttp-3.11.16/aiohttp/multipart.py +@@ -27,7 +27,12 @@ from urllib.parse import parse_qsl, unqu + + from multidict import CIMultiDict, CIMultiDictProxy + +-from .compression_utils import ZLibCompressor, ZLibDecompressor ++from .abc import AbstractStreamWriter ++from .compression_utils import ( ++ DEFAULT_MAX_DECOMPRESS_SIZE, ++ ZLibCompressor, ++ ZLibDecompressor, ++) + from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, +@@ -270,6 +275,7 @@ class BodyPartReader: + *, + subtype: str = "mixed", + default_charset: Optional[str] = None, ++ max_decompress_size: int = DEFAULT_MAX_DECOMPRESS_SIZE, + ) -> None: + self.headers = headers + self._boundary = boundary +@@ -286,6 +292,7 @@ class BodyPartReader: + self._prev_chunk: Optional[bytes] = None + self._content_eof = 0 + self._cache: Dict[str, Any] = {} ++ self._max_decompress_size = max_decompress_size + + def __aiter__(self: Self) -> Self: + return self +@@ -315,7 +322,7 @@ class BodyPartReader: + while not self._at_eof: + data.extend(await self.read_chunk(self.chunk_size)) + if decode: +- return self.decode(data) ++ return await self.decode(data) + return data + + async def read_chunk(self, size: int = chunk_size) -> bytes: +@@ -495,7 +502,7 @@ class BodyPartReader: + """Returns True if the boundary was reached or False otherwise.""" + return self._at_eof + +- def decode(self, data: bytes) -> bytes: ++ async def decode(self, data: bytes) -> bytes: + """Decodes data. + + Decoding is done according the specified Content-Encoding +@@ -505,18 +512,18 @@ class BodyPartReader: + data = self._decode_content_transfer(data) + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + if not self._is_form_data and CONTENT_ENCODING in self.headers: +- return self._decode_content(data) ++ return await self._decode_content(data) + return data + +- def _decode_content(self, data: bytes) -> bytes: ++ async def _decode_content(self, data: bytes) -> bytes: + encoding = self.headers.get(CONTENT_ENCODING, "").lower() + if encoding == "identity": + return data + if encoding in {"deflate", "gzip"}: +- return ZLibDecompressor( ++ return await ZLibDecompressor( + encoding=encoding, + suppress_deflate_header=True, +- ).decompress_sync(data) ++ ).decompress(data, max_length=self._max_decompress_size) + + raise RuntimeError(f"unknown content encoding: {encoding}") + +@@ -576,11 +583,11 @@ class BodyPartReaderPayload(Payload): + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + +- async def write(self, writer: Any) -> None: ++ async def write(self, writer: AbstractStreamWriter) -> None: + field = self._value + chunk = await field.read_chunk(size=2**16) + while chunk: +- await writer.write(field.decode(chunk)) ++ await writer.write(await field.decode(chunk)) + chunk = await field.read_chunk(size=2**16) + + +@@ -985,7 +992,9 @@ class MultipartWriter(Payload): + for part, _e, _te in self._parts + ) + +- async def write(self, writer: Any, close_boundary: bool = True) -> None: ++ async def write( ++ self, writer: AbstractStreamWriter, close_boundary: bool = True ++ ) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + if self._is_form_data: +@@ -1014,7 +1023,7 @@ class MultipartWriter(Payload): + + + class MultipartPayloadWriter: +- def __init__(self, writer: Any) -> None: ++ def __init__(self, writer: AbstractStreamWriter) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Optional[ZLibCompressor] = None +--- 
python-aiohttp-3.11.16.orig/aiohttp/web_request.py ++++ python-aiohttp-3.11.16/aiohttp/web_request.py +@@ -740,7 +740,7 @@ class BaseRequest(MutableMapping[str, An + ) + chunk = await field.read_chunk(size=2**16) + while chunk: +- chunk = field.decode(chunk) ++ chunk = await field.decode(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) + size += len(chunk) + if 0 < max_size < size: +--- python-aiohttp-3.11.16.orig/docs/spelling_wordlist.txt ++++ python-aiohttp-3.11.16/docs/spelling_wordlist.txt +@@ -185,6 +185,7 @@ lowercased + Mako + manylinux + metadata ++MiB + microservice + middleware + middlewares +--- python-aiohttp-3.11.16.orig/tests/test_client_functional.py ++++ python-aiohttp-3.11.16/tests/test_client_functional.py +@@ -12,9 +12,23 @@ import sys + import tarfile + import time + import zipfile ++import zlib + from typing import Any, AsyncIterator, Awaitable, Callable, List, Type + from unittest import mock + ++try: ++ try: ++ import brotlicffi as brotli ++ except ImportError: ++ import brotli ++except ImportError: ++ brotli = None # pragma: no cover ++ ++try: ++ from backports.zstd import ZstdCompressor ++except ImportError: ++ ZstdCompressor = None # type: ignore[assignment,misc] # pragma: no cover ++ + import pytest + from multidict import MultiDict + from yarl import URL +@@ -33,7 +47,9 @@ from aiohttp.client_exceptions import ( + TooManyRedirects, + ) + from aiohttp.client_reqrep import ClientRequest ++from aiohttp.compression_utils import DEFAULT_MAX_DECOMPRESS_SIZE + from aiohttp.connector import Connection ++from aiohttp.http_exceptions import DecompressSizeError + from aiohttp.http_writer import StreamWriter + from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer + from aiohttp.test_utils import TestClient, TestServer, unused_port +@@ -2148,8 +2164,102 @@ async def test_bad_payload_compression(a + resp.close() + + +-async def test_bad_payload_chunked_encoding(aiohttp_client) -> None: +- async def handler(request): ++async def test_payload_decompress_size_limit(aiohttp_client: AiohttpClient) -> None: ++ """Test that decompression size limit triggers DecompressSizeError. ++ ++ When a compressed payload expands beyond the configured limit, ++ we raise DecompressSizeError. ++ """ ++ # Create a highly compressible payload that exceeds the decompression limit. ++ # 64MiB of repeated bytes compresses to ~32KB but expands beyond the ++ # 32MiB per-call limit. 
++ original = b"A" * (64 * 2**20) ++ compressed = zlib.compress(original) ++ assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE ++ ++ async def handler(request: web.Request) -> web.Response: ++ # Send compressed data with Content-Encoding header ++ resp = web.Response(body=compressed) ++ resp.headers["Content-Encoding"] = "deflate" ++ return resp ++ ++ app = web.Application() ++ app.router.add_get("/", handler) ++ client = await aiohttp_client(app) ++ ++ async with client.get("/") as resp: ++ assert resp.status == 200 ++ ++ with pytest.raises(aiohttp.ClientPayloadError) as exc_info: ++ await resp.read() ++ ++ assert isinstance(exc_info.value.__cause__, DecompressSizeError) ++ assert "Decompressed data exceeds" in str(exc_info.value.__cause__) ++ ++ ++@pytest.mark.skipif(brotli is None, reason="brotli is not installed") ++async def test_payload_decompress_size_limit_brotli( ++ aiohttp_client: AiohttpClient, ++) -> None: ++ """Test that brotli decompression size limit triggers DecompressSizeError.""" ++ assert brotli is not None ++ # Create a highly compressible payload that exceeds the decompression limit. ++ original = b"A" * (64 * 2**20) ++ compressed = brotli.compress(original) ++ assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE ++ ++ async def handler(request: web.Request) -> web.Response: ++ resp = web.Response(body=compressed) ++ resp.headers["Content-Encoding"] = "br" ++ return resp ++ ++ app = web.Application() ++ app.router.add_get("/", handler) ++ client = await aiohttp_client(app) ++ ++ async with client.get("/") as resp: ++ assert resp.status == 200 ++ ++ with pytest.raises(aiohttp.ClientPayloadError) as exc_info: ++ await resp.read() ++ ++ assert isinstance(exc_info.value.__cause__, DecompressSizeError) ++ assert "Decompressed data exceeds" in str(exc_info.value.__cause__) ++ ++ ++@pytest.mark.skipif(ZstdCompressor is None, reason="backports.zstd is not installed") ++async def test_payload_decompress_size_limit_zstd( ++ aiohttp_client: AiohttpClient, ++) -> None: ++ """Test that zstd decompression size limit triggers DecompressSizeError.""" ++ assert ZstdCompressor is not None ++ # Create a highly compressible payload that exceeds the decompression limit. 
++ original = b"A" * (64 * 2**20) ++ compressor = ZstdCompressor() ++ compressed = compressor.compress(original) + compressor.flush() ++ assert len(original) > DEFAULT_MAX_DECOMPRESS_SIZE ++ ++ async def handler(request: web.Request) -> web.Response: ++ resp = web.Response(body=compressed) ++ resp.headers["Content-Encoding"] = "zstd" ++ return resp ++ ++ app = web.Application() ++ app.router.add_get("/", handler) ++ client = await aiohttp_client(app) ++ ++ async with client.get("/") as resp: ++ assert resp.status == 200 ++ ++ with pytest.raises(aiohttp.ClientPayloadError) as exc_info: ++ await resp.read() ++ ++ assert isinstance(exc_info.value.__cause__, DecompressSizeError) ++ assert "Decompressed data exceeds" in str(exc_info.value.__cause__) ++ ++ ++async def test_bad_payload_chunked_encoding(aiohttp_client: AiohttpClient) -> None: ++ async def handler(request: web.Request) -> web.StreamResponse: + resp = web.StreamResponse() + resp.force_close() + resp._length_check = False +--- python-aiohttp-3.11.16.orig/tests/test_http_parser.py ++++ python-aiohttp-3.11.16/tests/test_http_parser.py +@@ -2,6 +2,7 @@ + + import asyncio + import re ++import zlib + from contextlib import nullcontext + from typing import Any, Dict, List + from unittest import mock +@@ -1870,3 +1871,36 @@ class TestDeflateBuffer: + dbuf.feed_eof() + + assert buf.at_eof() ++ ++ @pytest.mark.parametrize( ++ "chunk_size", ++ [1024, 2**14, 2**16], # 1KB, 16KB, 64KB ++ ids=["1KB", "16KB", "64KB"], ++ ) ++ async def test_streaming_decompress_large_payload( ++ self, protocol: BaseProtocol, chunk_size: int ++ ) -> None: ++ """Test that large payloads decompress correctly when streamed in chunks. ++ ++ This simulates real HTTP streaming where compressed data arrives in ++ small network chunks. Each chunk's decompressed output should be within ++ the max_decompress_size limit, allowing full recovery of the original data. ++ """ ++ # Create a large payload (3MiB) that compresses well ++ original = b"A" * (3 * 2**20) ++ compressed = zlib.compress(original) ++ ++ buf = aiohttp.StreamReader(protocol, 2**16, loop=asyncio.get_running_loop()) ++ dbuf = DeflateBuffer(buf, "deflate") ++ ++ # Feed compressed data in chunks (simulating network streaming) ++ for i in range(0, len(compressed), chunk_size): ++ chunk = compressed[i : i + chunk_size] ++ dbuf.feed_data(chunk, len(chunk)) ++ ++ dbuf.feed_eof() ++ ++ # Read all decompressed data ++ result = b"".join(buf._buffer) ++ assert len(result) == len(original) ++ assert result == original +--- python-aiohttp-3.11.16.orig/tests/test_multipart.py ++++ python-aiohttp-3.11.16/tests/test_multipart.py +@@ -10,6 +10,7 @@ import pytest + + import aiohttp + from aiohttp import payload ++from aiohttp.abc import AbstractStreamWriter + from aiohttp.hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, +@@ -33,14 +34,14 @@ def buf(): + + + @pytest.fixture +-def stream(buf): +- writer = mock.Mock() ++def stream(buf: bytearray) -> AbstractStreamWriter: ++ writer = mock.create_autospec(AbstractStreamWriter, instance=True, spec_set=True) + + async def write(chunk): + buf.extend(chunk) + + writer.write.side_effect = write +- return writer ++ return writer # type: ignore[no-any-return] + + + @pytest.fixture +@@ -386,7 +387,7 @@ class TestPartReader: + result = b"" + while not obj.at_eof(): + chunk = await obj.read_chunk(size=6) +- result += obj.decode(chunk) ++ result += await obj.decode(chunk) + assert b"Time to Relax!" 
== result + + @pytest.mark.parametrize("encoding", ("binary", "8bit", "7bit")) +@@ -1072,7 +1073,9 @@ async def test_writer(writer) -> None: + assert writer.boundary == ":" + + +-async def test_writer_serialize_io_chunk(buf, stream, writer) -> None: ++async def test_writer_serialize_io_chunk( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + with io.BytesIO(b"foobarbaz") as file_handle: + writer.append(file_handle) + await writer.write(stream) +@@ -1082,7 +1085,9 @@ async def test_writer_serialize_io_chunk + ) + + +-async def test_writer_serialize_json(buf, stream, writer) -> None: ++async def test_writer_serialize_json( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + writer.append_json({"привет": "мир"}) + await writer.write(stream) + assert ( +@@ -1091,7 +1096,9 @@ async def test_writer_serialize_json(buf + ) + + +-async def test_writer_serialize_form(buf, stream, writer) -> None: ++async def test_writer_serialize_form( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + data = [("foo", "bar"), ("foo", "baz"), ("boo", "zoo")] + writer.append_form(data) + await writer.write(stream) +@@ -1099,7 +1106,9 @@ async def test_writer_serialize_form(buf + assert b"foo=bar&foo=baz&boo=zoo" in buf + + +-async def test_writer_serialize_form_dict(buf, stream, writer) -> None: ++async def test_writer_serialize_form_dict( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + data = {"hello": "мир"} + writer.append_form(data) + await writer.write(stream) +@@ -1107,7 +1116,9 @@ async def test_writer_serialize_form_dic + assert b"hello=%D0%BC%D0%B8%D1%80" in buf + + +-async def test_writer_write(buf, stream, writer) -> None: ++async def test_writer_write( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + writer.append("foo-bar-baz") + writer.append_json({"test": "passed"}) + writer.append_form({"test": "passed"}) +@@ -1153,7 +1164,9 @@ async def test_writer_write(buf, stream, + ) == bytes(buf) + + +-async def test_writer_write_no_close_boundary(buf, stream) -> None: ++async def test_writer_write_no_close_boundary( ++ buf: bytearray, stream: AbstractStreamWriter ++) -> None: + writer = aiohttp.MultipartWriter(boundary=":") + writer.append("foo-bar-baz") + writer.append_json({"test": "passed"}) +@@ -1185,12 +1198,18 @@ async def test_writer_write_no_close_bou + ) == bytes(buf) + + +-async def test_writer_write_no_parts(buf, stream, writer) -> None: ++async def test_writer_write_no_parts( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + await writer.write(stream) + assert b"--:--\r\n" == bytes(buf) + + +-async def test_writer_serialize_with_content_encoding_gzip(buf, stream, writer): ++async def test_writer_serialize_with_content_encoding_gzip( ++ buf: bytearray, ++ stream: AbstractStreamWriter, ++ writer: aiohttp.MultipartWriter, ++) -> None: + writer.append("Time to Relax!", {CONTENT_ENCODING: "gzip"}) + await writer.write(stream) + headers, message = bytes(buf).split(b"\r\n\r\n", 1) +@@ -1206,7 +1225,9 @@ async def test_writer_serialize_with_con + assert b"Time to Relax!" 
== data + + +-async def test_writer_serialize_with_content_encoding_deflate(buf, stream, writer): ++async def test_writer_serialize_with_content_encoding_deflate( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + writer.append("Time to Relax!", {CONTENT_ENCODING: "deflate"}) + await writer.write(stream) + headers, message = bytes(buf).split(b"\r\n\r\n", 1) +@@ -1220,7 +1241,9 @@ async def test_writer_serialize_with_con + assert thing == message + + +-async def test_writer_serialize_with_content_encoding_identity(buf, stream, writer): ++async def test_writer_serialize_with_content_encoding_identity( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + thing = b"\x0b\xc9\xccMU(\xc9W\x08J\xcdI\xacP\x04\x00" + writer.append(thing, {CONTENT_ENCODING: "identity"}) + await writer.write(stream) +@@ -1235,12 +1258,16 @@ async def test_writer_serialize_with_con + assert thing == message.split(b"\r\n")[0] + + +-def test_writer_serialize_with_content_encoding_unknown(buf, stream, writer): ++def test_writer_serialize_with_content_encoding_unknown( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + with pytest.raises(RuntimeError): + writer.append("Time to Relax!", {CONTENT_ENCODING: "snappy"}) + + +-async def test_writer_with_content_transfer_encoding_base64(buf, stream, writer): ++async def test_writer_with_content_transfer_encoding_base64( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "base64"}) + await writer.write(stream) + headers, message = bytes(buf).split(b"\r\n\r\n", 1) +@@ -1253,7 +1280,9 @@ async def test_writer_with_content_trans + assert b"VGltZSB0byBSZWxheCE=" == message.split(b"\r\n")[0] + + +-async def test_writer_content_transfer_encoding_quote_printable(buf, stream, writer): ++async def test_writer_content_transfer_encoding_quote_printable( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + writer.append("Привет, мир!", {CONTENT_TRANSFER_ENCODING: "quoted-printable"}) + await writer.write(stream) + headers, message = bytes(buf).split(b"\r\n\r\n", 1) +@@ -1269,7 +1298,9 @@ async def test_writer_content_transfer_e + ) + + +-def test_writer_content_transfer_encoding_unknown(buf, stream, writer) -> None: ++def test_writer_content_transfer_encoding_unknown( ++ buf: bytearray, stream: AbstractStreamWriter, writer: aiohttp.MultipartWriter ++) -> None: + with pytest.raises(RuntimeError): + writer.append("Time to Relax!", {CONTENT_TRANSFER_ENCODING: "unknown"}) + +@@ -1393,7 +1424,9 @@ class TestMultipartWriter: + with aiohttp.MultipartWriter(boundary=":") as writer: + writer.append(None) + +- async def test_write_preserves_content_disposition(self, buf, stream) -> None: ++ async def test_write_preserves_content_disposition( ++ self, buf: bytearray, stream: AbstractStreamWriter ++ ) -> None: + with aiohttp.MultipartWriter(boundary=":") as writer: + part = writer.append(b"foo", headers={CONTENT_TYPE: "test/passed"}) + part.set_content_disposition("form-data", filename="bug") +@@ -1410,7 +1443,9 @@ class TestMultipartWriter: + ) + assert message == b"foo\r\n--:--\r\n" + +- async def test_preserve_content_disposition_header(self, buf, stream): ++ async def test_preserve_content_disposition_header( ++ self, buf: bytearray, stream: AbstractStreamWriter ++ ) -> None: + # 
https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 + with pathlib.Path(__file__).open("rb") as fobj: + with aiohttp.MultipartWriter("form-data", boundary=":") as writer: +@@ -1434,7 +1469,9 @@ class TestMultipartWriter: + b'Content-Disposition: attachments; filename="bug.py"' + ) + +- async def test_set_content_disposition_override(self, buf, stream): ++ async def test_set_content_disposition_override( ++ self, buf: bytearray, stream: AbstractStreamWriter ++ ) -> None: + # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 + with pathlib.Path(__file__).open("rb") as fobj: + with aiohttp.MultipartWriter("form-data", boundary=":") as writer: +@@ -1458,7 +1495,9 @@ class TestMultipartWriter: + b'Content-Disposition: attachments; filename="bug.py"' + ) + +- async def test_reset_content_disposition_header(self, buf, stream): ++ async def test_reset_content_disposition_header( ++ self, buf: bytearray, stream: AbstractStreamWriter ++ ) -> None: + # https://github.com/aio-libs/aiohttp/pull/3475#issuecomment-451072381 + with pathlib.Path(__file__).open("rb") as fobj: + with aiohttp.MultipartWriter("form-data", boundary=":") as writer: diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69224.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69224.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69224.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69224.patch 2026-04-21 18:54:07.000000000 +0000 @@ -0,0 +1,113 @@ +From 32677f2adfd907420c078dda6b79225c6f4ebce0 Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 00:02:45 +0000 +Subject: [PATCH] Reject non-ascii characters in some headers (#11886) (#11902) + +--- python-aiohttp-3.11.16.orig/aiohttp/_http_parser.pyx ++++ python-aiohttp-3.11.16/aiohttp/_http_parser.pyx +@@ -421,7 +421,8 @@ cdef class HttpParser: + headers = CIMultiDictProxy(CIMultiDict(self._headers)) + + if self._cparser.type == cparser.HTTP_REQUEST: +- allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES ++ h_upg = headers.get("upgrade", "") ++ allowed = upgrade and h_upg.isascii() and h_upg.lower() in ALLOWED_UPGRADES + if allowed or self._cparser.method == cparser.HTTP_CONNECT: + self._upgraded = True + else: +@@ -436,8 +437,7 @@ cdef class HttpParser: + enc = self._content_encoding + if enc is not None: + self._content_encoding = None +- enc = enc.lower() +- if enc in ('gzip', 'deflate', 'br'): ++ if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: + encoding = enc + + if self._cparser.type == cparser.HTTP_REQUEST: +--- python-aiohttp-3.11.16.orig/aiohttp/http_parser.py ++++ python-aiohttp-3.11.16/aiohttp/http_parser.py +@@ -233,7 +233,9 @@ class HeadersParser: + + def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" +- return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} ++ u = headers.get(hdrs.UPGRADE, "") ++ # .lower() can transform non-ascii characters. 
++ return u.isascii() and u.lower() in {"tcp", "websocket"} + + + class HttpParser(abc.ABC, Generic[_MsgT]): +@@ -537,11 +539,9 @@ class HttpParser(abc.ABC, Generic[_MsgT] + upgrade = True + + # encoding +- enc = headers.get(hdrs.CONTENT_ENCODING) +- if enc: +- enc = enc.lower() +- if enc in ("gzip", "deflate", "br"): +- encoding = enc ++ enc = headers.get(hdrs.CONTENT_ENCODING, "") ++ if enc.isascii() and enc.lower() in {"gzip", "deflate", "br", "zstd"}: ++ encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) +@@ -658,7 +658,9 @@ class HttpRequestParser(HttpParser[RawRe + ) + + def _is_chunked_te(self, te: str) -> bool: +- if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": ++ te = te.rsplit(",", maxsplit=1)[-1].strip(" \t") ++ # .lower() transforms some non-ascii chars, so must check first. ++ if te.isascii() and te.lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") +--- python-aiohttp-3.11.16.orig/tests/test_http_parser.py ++++ python-aiohttp-3.11.16/tests/test_http_parser.py +@@ -496,7 +496,21 @@ def test_request_chunked(parser) -> None + assert isinstance(payload, streams.StreamReader) + + +-def test_request_te_chunked_with_content_length(parser: Any) -> None: ++def test_te_header_non_ascii(parser: HttpRequestParser) -> None: ++ # K = Kelvin sign, not valid ascii. ++ text = "GET /test HTTP/1.1\r\nTransfer-Encoding: chunKed\r\n\r\n" ++ with pytest.raises(http_exceptions.BadHttpMessage): ++ parser.feed_data(text.encode()) ++ ++ ++def test_upgrade_header_non_ascii(parser: HttpRequestParser) -> None: ++ # K = Kelvin sign, not valid ascii. ++ text = "GET /test HTTP/1.1\r\nUpgrade: websocKet\r\n\r\n" ++ messages, upgrade, tail = parser.feed_data(text.encode()) ++ assert not upgrade ++ ++ ++def test_request_te_chunked_with_content_length(parser: HttpRequestParser) -> None: + text = ( + b"GET /test HTTP/1.1\r\n" + b"content-length: 1234\r\n" +@@ -586,6 +600,21 @@ def test_compression_brotli(parser) -> N + assert msg.compression == "br" + + ++@pytest.mark.parametrize( ++ "enc", ++ ( ++ "zstd".encode(), # "st".upper() == "ST" ++ "deflate".encode(), # "fl".upper() == "FL" ++ ), ++) ++def test_compression_non_ascii(parser: HttpRequestParser, enc: bytes) -> None: ++ text = b"GET /test HTTP/1.1\r\ncontent-encoding: " + enc + b"\r\n\r\n" ++ messages, upgrade, tail = parser.feed_data(text) ++ msg = messages[0][0] ++ # Non-ascii input should not evaluate to a valid encoding scheme. 
++ assert msg.compression is None ++ ++ + def test_compression_unknown(parser) -> None: + text = b"GET /test HTTP/1.1\r\ncontent-encoding: compress\r\n\r\n" + messages, upgrade, tail = parser.feed_data(text) diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69225.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69225.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69225.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69225.patch 2026-04-21 18:54:45.000000000 +0000 @@ -0,0 +1,33 @@ +From c7b7a044f88c71cefda95ec75cdcfaa4792b3b96 Mon Sep 17 00:00:00 2001 +From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> +Date: Sat, 3 Jan 2026 00:39:41 +0000 +Subject: [PATCH] [PR #11887/7a067d19 backport][3.13] Reject non-ascii digits + in Range header (#11903) + +--- python-aiohttp-3.11.16.orig/aiohttp/web_request.py ++++ python-aiohttp-3.11.16/aiohttp/web_request.py +@@ -607,7 +607,7 @@ class BaseRequest(MutableMapping[str, An + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" +- start, end = re.findall(pattern, rng)[0] ++ start, end = re.findall(pattern, rng, re.ASCII)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + +--- python-aiohttp-3.11.16.orig/tests/test_web_request.py ++++ python-aiohttp-3.11.16/tests/test_web_request.py +@@ -243,6 +243,13 @@ def test_range_to_slice_tail_stop() -> N + assert req.content[req.http_range] == payload[-500:] + + ++def test_range_non_ascii() -> None: ++ # ५ = DEVANAGARI DIGIT FIVE ++ req = make_mocked_request("GET", "/", headers=CIMultiDict([("RANGE", "bytes=4-५")])) ++ with pytest.raises(ValueError, match="range not in acceptable format"): ++ req.http_range ++ ++ + def test_non_keepalive_on_http10() -> None: + req = make_mocked_request("GET", "/", version=HttpVersion(1, 0)) + assert not req.keep_alive diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69226.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69226.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69226.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69226.patch 2026-04-21 18:55:23.000000000 +0000 @@ -0,0 +1,110 @@ +From f2a86fd5ac0383000d1715afddfa704413f0711e Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 01:55:05 +0000 +Subject: [PATCH] Reject static URLs that traverse outside static root (#11888) + (#11906) + +--- python-aiohttp-3.11.16.orig/aiohttp/web_urldispatcher.py ++++ python-aiohttp-3.11.16/aiohttp/web_urldispatcher.py +@@ -7,6 +7,7 @@ import html + import inspect + import keyword + import os ++import platform + import re + import sys + import warnings +@@ -94,6 +95,7 @@ ROUTE_RE: Final[Pattern[str]] = re.compi + ) + PATH_SEP: Final[str] = re.escape("/") + ++IS_WINDOWS: Final[bool] = platform.system() == "Windows" + + _ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] + _Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +@@ -649,7 +651,12 @@ class StaticResource(PrefixResource): + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.path_safe + method = request.method +- if not path.startswith(self._prefix2) and path != self._prefix: ++ # We normalise here to avoid matches that traverse below the static root. ++ # e.g. 
/static/../../../../home/user/webapp/static/ ++ norm_path = os.path.normpath(path) ++ if IS_WINDOWS: ++ norm_path = norm_path.replace("\\", "/") ++ if not norm_path.startswith(self._prefix2) and norm_path != self._prefix: + return None, set() + + allowed_methods = self._allowed_methods +@@ -666,14 +673,7 @@ class StaticResource(PrefixResource): + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: +- rel_url = request.match_info["filename"] +- filename = Path(rel_url) +- if filename.anchor: +- # rel_url is an absolute name like +- # /static/\\machine_name\c$ or /static/D:\path +- # where the static dir is totally different +- raise HTTPForbidden() +- ++ filename = request.match_info["filename"] + unresolved_path = self._directory.joinpath(filename) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( +--- python-aiohttp-3.11.16.orig/tests/test_urldispatch.py ++++ python-aiohttp-3.11.16/tests/test_urldispatch.py +@@ -1,4 +1,5 @@ + import pathlib ++import platform + import re + from collections.abc import Container, Iterable, Mapping, MutableMapping, Sized + from typing import NoReturn +@@ -1041,7 +1042,22 @@ async def test_405_for_resource_adapter( + assert (None, {"HEAD", "GET"}) == ret + + +-async def test_check_allowed_method_for_found_resource(router) -> None: ++@pytest.mark.skipif(platform.system() == "Windows", reason="Different path formats") ++async def test_static_resource_outside_traversal(router: web.UrlDispatcher) -> None: ++ """Test relative path traversing outside root does not resolve.""" ++ static_file = pathlib.Path(aiohttp.__file__) ++ request_path = "/st" + "/.." * (len(static_file.parts) - 2) + str(static_file) ++ assert pathlib.Path(request_path).resolve() == static_file ++ ++ resource = router.add_static("/st", static_file.parent) ++ ret = await resource.resolve(make_mocked_request("GET", request_path)) ++ # Should not resolve, otherwise filesystem information may be leaked. 
++ assert (None, set()) == ret ++ ++ ++async def test_check_allowed_method_for_found_resource( ++ router: web.UrlDispatcher, ++) -> None: + handler = make_handler() + resource = router.add_resource("/") + resource.add_route("GET", handler) +--- python-aiohttp-3.11.16.orig/tests/test_web_sendfile_functional.py ++++ python-aiohttp-3.11.16/tests/test_web_sendfile_functional.py +@@ -635,7 +635,7 @@ async def test_static_file_directory_tra + + url_abspath = "/static/" + str(full_path.resolve()) + resp = await client.get(url_abspath) +- assert 403 == resp.status ++ assert resp.status == 404 + await resp.release() + + await client.close() +--- python-aiohttp-3.11.16.orig/tests/test_web_urldispatcher.py ++++ python-aiohttp-3.11.16/tests/test_web_urldispatcher.py +@@ -838,8 +838,8 @@ async def test_static_absolute_url( + here = pathlib.Path(__file__).parent + app.router.add_static("/static", here) + client = await aiohttp_client(app) +- resp = await client.get("/static/" + str(file_path.resolve())) +- assert resp.status == 403 ++ async with client.get("/static/" + str(file_path.resolve())) as resp: ++ assert resp.status == 404 + + + async def test_for_issue_5250( diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69227.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69227.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69227.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69227.patch 2026-04-21 18:57:27.000000000 +0000 @@ -0,0 +1,133 @@ +From bc1319ec3cbff9438a758951a30907b072561259 Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 04:53:29 +0000 +Subject: [PATCH] Replace asserts with exceptions (#11897) (#11914) + +--- python-aiohttp-3.11.16.orig/aiohttp/multipart.py ++++ python-aiohttp-3.11.16/aiohttp/multipart.py +@@ -365,11 +365,8 @@ class BodyPartReader: + self._read_bytes += len(chunk) + if self._read_bytes == self._length: + self._at_eof = True +- if self._at_eof: +- clrf = await self._content.readline() +- assert ( +- b"\r\n" == clrf +- ), "reader did not read all the data or it is malformed" ++ if self._at_eof and await self._content.readline() != b"\r\n": ++ raise ValueError("Reader did not read all the data or it is malformed") + return chunk + + async def _read_chunk_from_length(self, size: int) -> bytes: +@@ -398,7 +395,8 @@ class BodyPartReader: + while len(chunk) < self._boundary_len: + chunk += await self._content.read(size) + self._content_eof += int(self._content.at_eof()) +- assert self._content_eof < 3, "Reading after EOF" ++ if self._content_eof > 2: ++ raise ValueError("Reading after EOF") + if self._content_eof: + break + if len(chunk) > size: +--- python-aiohttp-3.11.16.orig/aiohttp/web_request.py ++++ python-aiohttp-3.11.16/aiohttp/web_request.py +@@ -721,14 +721,13 @@ class BaseRequest(MutableMapping[str, An + multipart = await self.multipart() + max_size = self._client_max_size + +- field = await multipart.next() +- while field is not None: ++ while (field := await multipart.next()) is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): +- assert field.name is not None +- ++ if field.name is None: ++ raise ValueError("Multipart field missing name.") + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+@@ -779,8 +778,6 @@ class BaseRequest(MutableMapping[str, An + raise ValueError( + "To decode nested multipart you need to use custom reader", + ) +- +- field = await multipart.next() + else: + data = await self.read() + if data: +--- python-aiohttp-3.11.16.orig/tests/test_multipart.py ++++ python-aiohttp-3.11.16/tests/test_multipart.py +@@ -7,6 +7,7 @@ import zlib + from unittest import mock + + import pytest ++from multidict import CIMultiDict, CIMultiDictProxy + + import aiohttp + from aiohttp import payload +@@ -202,11 +203,21 @@ class TestPartReader: + with Stream(data) as stream: + obj = aiohttp.BodyPartReader(BOUNDARY, {}, stream) + result = b"" +- with pytest.raises(AssertionError): ++ with pytest.raises(ValueError): + for _ in range(4): + result += await obj.read_chunk(7) + assert data == result + ++ async def test_read_with_content_length_malformed_crlf(self) -> None: ++ # Content-Length is correct but data after content is not \r\n ++ content = b"Hello" ++ h = CIMultiDictProxy(CIMultiDict({"CONTENT-LENGTH": str(len(content))})) ++ # Malformed: "XX" instead of "\r\n" after content ++ with Stream(content + b"XX--:--") as stream: ++ obj = aiohttp.BodyPartReader(BOUNDARY, h, stream) ++ with pytest.raises(ValueError, match="malformed"): ++ await obj.read() ++ + async def test_read_boundary_with_incomplete_chunk(self) -> None: + with Stream(b"") as stream: + +--- python-aiohttp-3.11.16.orig/tests/test_web_request.py ++++ python-aiohttp-3.11.16/tests/test_web_request.py +@@ -10,6 +10,7 @@ from multidict import CIMultiDict, CIMul + from yarl import URL + + from aiohttp import HttpVersion ++from aiohttp.base_protocol import BaseProtocol + from aiohttp.http_parser import RawRequestMessage + from aiohttp.streams import StreamReader + from aiohttp.test_utils import make_mocked_request +@@ -655,7 +656,28 @@ async def test_multipart_formdata(protoc + assert dict(result) == {"a": "b", "c": "d"} + + +-async def test_multipart_formdata_file(protocol) -> None: ++async def test_multipart_formdata_field_missing_name(protocol: BaseProtocol) -> None: ++ # Ensure ValueError is raised when Content-Disposition has no name ++ payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) ++ payload.feed_data( ++ b"-----------------------------326931944431359\r\n" ++ b"Content-Disposition: form-data\r\n" # Missing name! 
++ b"\r\n" ++ b"value\r\n" ++ b"-----------------------------326931944431359--\r\n" ++ ) ++ content_type = ( ++ "multipart/form-data; boundary=---------------------------326931944431359" ++ ) ++ payload.feed_eof() ++ req = make_mocked_request( ++ "POST", "/", headers={"CONTENT-TYPE": content_type}, payload=payload ++ ) ++ with pytest.raises(ValueError, match="Multipart field missing name"): ++ await req.post() ++ ++ ++async def test_multipart_formdata_file(protocol: BaseProtocol) -> None: + # Make sure file uploads work, even without a content type + payload = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop()) + payload.feed_data( diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69228.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69228.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69228.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69228.patch 2026-04-21 18:59:30.000000000 +0000 @@ -0,0 +1,31 @@ +From b7dbd35375aedbcd712cbae8ad513d56d11cce60 Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 02:48:45 +0000 +Subject: [PATCH] Enforce client_max_size over entire multipart form (#11889) + (#11908) + +--- python-aiohttp-3.11.16.orig/aiohttp/web_request.py ++++ python-aiohttp-3.11.16/aiohttp/web_request.py +@@ -721,8 +721,8 @@ class BaseRequest(MutableMapping[str, An + multipart = await self.multipart() + max_size = self._client_max_size + ++ size = 0 + while (field := await multipart.next()) is not None: +- size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): +--- python-aiohttp-3.11.16.orig/tests/test_web_functional.py ++++ python-aiohttp-3.11.16/tests/test_web_functional.py +@@ -1695,8 +1695,8 @@ async def test_app_max_client_size(aioht + await resp.release() + + +-async def test_app_max_client_size_adjusted(aiohttp_client) -> None: +- async def handler(request): ++async def test_app_max_client_size_adjusted(aiohttp_client: AiohttpClient) -> None: ++ async def handler(request: web.Request) -> web.Response: + await request.post() + return web.Response(body=b"ok") + diff -Nru python-aiohttp-3.11.16/debian/patches/CVE-2025-69229.patch python-aiohttp-3.11.16/debian/patches/CVE-2025-69229.patch --- python-aiohttp-3.11.16/debian/patches/CVE-2025-69229.patch 1970-01-01 00:00:00.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/CVE-2025-69229.patch 2026-04-21 19:00:35.000000000 +0000 @@ -0,0 +1,316 @@ +From dc3170b56904bdf814228fae70a5501a42a6c712 Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 03:57:17 +0000 +Subject: [PATCH] Use collections.deque for chunk splits (#11892) (#11912) + +From 4ed97a4e46eaf61bd0f05063245f613469700229 Mon Sep 17 00:00:00 2001 +From: Sam Bull +Date: Sat, 3 Jan 2026 15:23:14 +0000 +Subject: [PATCH] Limit number of chunks before pausing reading (#11894) + (#11916) + +--- python-aiohttp-3.11.16.orig/aiohttp/streams.py ++++ python-aiohttp-3.11.16/aiohttp/streams.py +@@ -116,6 +116,8 @@ class StreamReader(AsyncStreamReaderMixi + "_protocol", + "_low_water", + "_high_water", ++ "_low_water_chunks", ++ "_high_water_chunks", + "_loop", + "_size", + "_cursor", +@@ -145,10 +147,15 @@ class StreamReader(AsyncStreamReaderMixi + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() ++ # Ensure high_water_chunks >= 3 so it's always > low_water_chunks. ++ self._high_water_chunks = max(3, limit // 4) ++ # Use max(2, ...) 
because there's always at least 1 chunk split remaining ++ # (the current position), so we need low_water >= 2 to allow resume. ++ self._low_water_chunks = max(2, self._high_water_chunks // 2) + self._loop = loop + self._size = 0 + self._cursor = 0 +- self._http_chunk_splits: Optional[List[int]] = None ++ self._http_chunk_splits: Optional[Deque[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False +@@ -295,7 +302,7 @@ class StreamReader(AsyncStreamReaderMixi + raise RuntimeError( + "Called begin_http_chunk_receiving when some data was already fed" + ) +- self._http_chunk_splits = [] ++ self._http_chunk_splits = collections.deque() + + def end_http_chunk_receiving(self) -> None: + if self._http_chunk_splits is None: +@@ -321,6 +328,15 @@ class StreamReader(AsyncStreamReaderMixi + + self._http_chunk_splits.append(self.total_bytes) + ++ # If we get too many small chunks before self._high_water is reached, then any ++ # .read() call becomes computationally expensive, and could block the event loop ++ # for too long, hence an additional self._high_water_chunks here. ++ if ( ++ len(self._http_chunk_splits) > self._high_water_chunks ++ and not self._protocol._reading_paused ++ ): ++ self._protocol.pause_reading() ++ + # wake up readchunk when end of http chunk received + waiter = self._waiter + if waiter is not None: +@@ -454,7 +470,7 @@ class StreamReader(AsyncStreamReaderMixi + raise self._exception + + while self._http_chunk_splits: +- pos = self._http_chunk_splits.pop(0) ++ pos = self._http_chunk_splits.popleft() + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: +@@ -527,9 +543,16 @@ class StreamReader(AsyncStreamReaderMixi + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: +- chunk_splits.pop(0) ++ chunk_splits.popleft() + +- if self._size < self._low_water and self._protocol._reading_paused: ++ if ( ++ self._protocol._reading_paused ++ and self._size < self._low_water ++ and ( ++ self._http_chunk_splits is None ++ or len(self._http_chunk_splits) < self._low_water_chunks ++ ) ++ ): + self._protocol.resume_reading() + return data + +--- python-aiohttp-3.11.16.orig/tests/test_http_parser.py ++++ python-aiohttp-3.11.16/tests/test_http_parser.py +@@ -1266,7 +1266,8 @@ def test_http_request_chunked_payload(pa + parser.feed_data(b"4\r\ndata\r\n4\r\nline\r\n0\r\n\r\n") + + assert b"dataline" == b"".join(d for d in payload._buffer) +- assert [4, 8] == payload._http_chunk_splits ++ assert payload._http_chunk_splits is not None ++ assert [4, 8] == list(payload._http_chunk_splits) + assert payload.is_eof() + + +@@ -1281,7 +1282,8 @@ def test_http_request_chunked_payload_an + ) + + assert b"dataline" == b"".join(d for d in payload._buffer) +- assert [4, 8] == payload._http_chunk_splits ++ assert payload._http_chunk_splits is not None ++ assert [4, 8] == list(payload._http_chunk_splits) + assert payload.is_eof() + + assert len(messages) == 1 +@@ -1305,12 +1307,13 @@ def test_http_request_chunked_payload_ch + parser.feed_data(b"test: test\r\n") + + assert b"dataline" == b"".join(d for d in payload._buffer) +- assert [4, 8] == payload._http_chunk_splits ++ assert payload._http_chunk_splits is not None ++ assert [4, 8] == list(payload._http_chunk_splits) + assert not payload.is_eof() + + parser.feed_data(b"\r\n") + assert b"dataline" == b"".join(d for d in payload._buffer) +- assert [4, 8] == payload._http_chunk_splits ++ assert [4, 
8] == list(payload._http_chunk_splits) + assert payload.is_eof() + + +@@ -1321,7 +1324,8 @@ def test_parse_chunked_payload_chunk_ext + parser.feed_data(b"4;test\r\ndata\r\n4\r\nline\r\n0\r\ntest: test\r\n\r\n") + + assert b"dataline" == b"".join(d for d in payload._buffer) +- assert [4, 8] == payload._http_chunk_splits ++ assert payload._http_chunk_splits is not None ++ assert [4, 8] == list(payload._http_chunk_splits) + assert payload.is_eof() + + +--- python-aiohttp-3.11.16.orig/tests/test_streams.py ++++ python-aiohttp-3.11.16/tests/test_streams.py +@@ -1552,3 +1552,173 @@ async def test_stream_reader_iter_chunks + + def test_isinstance_check() -> None: + assert isinstance(streams.EMPTY_PAYLOAD, streams.StreamReader) ++ ++ ++async def test_stream_reader_pause_on_high_water_chunks( ++ protocol: mock.Mock, ++) -> None: ++ """Test that reading is paused when chunk count exceeds high water mark.""" ++ loop = asyncio.get_event_loop() ++ # Use small limit so high_water_chunks is small: limit // 4 = 10 ++ stream = streams.StreamReader(protocol, limit=40, loop=loop) ++ ++ assert stream._high_water_chunks == 10 ++ assert stream._low_water_chunks == 5 ++ ++ # Feed chunks until we exceed high_water_chunks ++ for i in range(12): ++ stream.begin_http_chunk_receiving() ++ stream.feed_data(b"x") # 1 byte per chunk ++ stream.end_http_chunk_receiving() ++ ++ # pause_reading should have been called when chunk count exceeded 10 ++ protocol.pause_reading.assert_called() ++ ++ ++async def test_stream_reader_resume_on_low_water_chunks( ++ protocol: mock.Mock, ++) -> None: ++ """Test that reading resumes when chunk count drops below low water mark.""" ++ loop = asyncio.get_event_loop() ++ # Use small limit so high_water_chunks is small: limit // 4 = 10 ++ stream = streams.StreamReader(protocol, limit=40, loop=loop) ++ ++ assert stream._high_water_chunks == 10 ++ assert stream._low_water_chunks == 5 ++ ++ # Feed chunks until we exceed high_water_chunks ++ for i in range(12): ++ stream.begin_http_chunk_receiving() ++ stream.feed_data(b"x") # 1 byte per chunk ++ stream.end_http_chunk_receiving() ++ ++ # Simulate that reading was paused ++ protocol._reading_paused = True ++ protocol.pause_reading.reset_mock() ++ ++ # Read data to reduce both size and chunk count ++ # Reading will consume chunks and reduce _http_chunk_splits ++ data = await stream.read(10) ++ assert data == b"xxxxxxxxxx" ++ ++ # resume_reading should have been called when both size and chunk count ++ # dropped below their respective low water marks ++ protocol.resume_reading.assert_called() ++ ++ ++async def test_stream_reader_no_resume_when_chunks_still_high( ++ protocol: mock.Mock, ++) -> None: ++ """Test that reading doesn't resume if chunk count is still above low water.""" ++ loop = asyncio.get_event_loop() ++ # Use small limit so high_water_chunks is small: limit // 4 = 10 ++ stream = streams.StreamReader(protocol, limit=40, loop=loop) ++ ++ # Feed many chunks ++ for i in range(12): ++ stream.begin_http_chunk_receiving() ++ stream.feed_data(b"x") ++ stream.end_http_chunk_receiving() ++ ++ # Simulate that reading was paused ++ protocol._reading_paused = True ++ ++ # Read only a few bytes - chunk count will still be high ++ data = await stream.read(2) ++ assert data == b"xx" ++ ++ # resume_reading should NOT be called because chunk count is still >= low_water_chunks ++ protocol.resume_reading.assert_not_called() ++ ++ ++async def test_stream_reader_read_non_chunked_response( ++ protocol: mock.Mock, ++) -> None: ++ """Test that non-chunked 
responses work correctly (no chunk tracking).""" ++ loop = asyncio.get_event_loop() ++ stream = streams.StreamReader(protocol, limit=40, loop=loop) ++ ++ # Non-chunked: just feed data without begin/end_http_chunk_receiving ++ stream.feed_data(b"Hello World") ++ ++ # _http_chunk_splits should be None for non-chunked responses ++ assert stream._http_chunk_splits is None ++ ++ # Reading should work without issues ++ data = await stream.read(5) ++ assert data == b"Hello" ++ ++ data = await stream.read(6) ++ assert data == b" World" ++ ++ ++async def test_stream_reader_resume_non_chunked_when_paused( ++ protocol: mock.Mock, ++) -> None: ++ """Test that resume works for non-chunked responses when paused due to size.""" ++ loop = asyncio.get_event_loop() ++ # Small limit so we can trigger pause via size ++ stream = streams.StreamReader(protocol, limit=10, loop=loop) ++ ++ # Feed data that exceeds high_water (limit * 2 = 20) ++ stream.feed_data(b"x" * 25) ++ ++ # Simulate that reading was paused due to size ++ protocol._reading_paused = True ++ protocol.pause_reading.assert_called() ++ ++ # Read enough to drop below low_water (limit = 10) ++ data = await stream.read(20) ++ assert data == b"x" * 20 ++ ++ # resume_reading should be called (size is now 5 < low_water 10) ++ protocol.resume_reading.assert_called() ++ ++ ++@pytest.mark.parametrize("limit", [1, 2, 4]) ++async def test_stream_reader_small_limit_resumes_reading( ++ protocol: mock.Mock, ++ limit: int, ++) -> None: ++ """Test that small limits still allow resume_reading to be called. ++ ++ Even with very small limits, high_water_chunks should be at least 3 ++ and low_water_chunks should be at least 2, with high > low to ensure ++ proper flow control. ++ """ ++ loop = asyncio.get_event_loop() ++ stream = streams.StreamReader(protocol, limit=limit, loop=loop) ++ ++ # Verify minimum thresholds are enforced and high > low ++ assert stream._high_water_chunks >= 3 ++ assert stream._low_water_chunks >= 2 ++ assert stream._high_water_chunks > stream._low_water_chunks ++ ++ # Set up pause/resume side effects ++ def pause_reading() -> None: ++ protocol._reading_paused = True ++ ++ protocol.pause_reading.side_effect = pause_reading ++ ++ def resume_reading() -> None: ++ protocol._reading_paused = False ++ ++ protocol.resume_reading.side_effect = resume_reading ++ ++ # Feed 4 chunks (triggers pause at > high_water_chunks which is >= 3) ++ for char in b"abcd": ++ stream.begin_http_chunk_receiving() ++ stream.feed_data(bytes([char])) ++ stream.end_http_chunk_receiving() ++ ++ # Reading should now be paused ++ assert protocol._reading_paused is True ++ assert protocol.pause_reading.called ++ ++ # Read all data - should resume (chunk count drops below low_water_chunks) ++ data = stream.read_nowait() ++ assert data == b"abcd" ++ assert stream._size == 0 ++ ++ protocol.resume_reading.assert_called() ++ assert protocol._reading_paused is False diff -Nru python-aiohttp-3.11.16/debian/patches/series python-aiohttp-3.11.16/debian/patches/series --- python-aiohttp-3.11.16/debian/patches/series 2025-04-05 12:44:31.000000000 +0000 +++ python-aiohttp-3.11.16/debian/patches/series 2026-04-21 19:00:04.000000000 +0000 @@ -3,3 +3,10 @@ 0004-remove-sphinxcontrib.towncrier-for-now.patch 0005-avoid-aiohttp-theme.patch 0006-use-local-documentation.patch +CVE-2025-69223.patch +CVE-2025-69224.patch +CVE-2025-69225.patch +CVE-2025-69226.patch +CVE-2025-69227.patch +CVE-2025-69228.patch +CVE-2025-69229.patch