Your message dated Mon, 28 Jul 2025 20:55:04 +0000 with message-id <E1ugUsC-001Dzr-1t@respighi.debian.org> and subject line unblock starlette has caused the Debian Bug report #1110051, regarding unblock: starlette/0.46.1-3 to be marked as done. This means that you claim that the problem has been dealt with. If this is not the case it is now your responsibility to reopen the Bug report if necessary, and/or fix the problem forthwith. (NB: If you are a system administrator and have no idea what this message is talking about, this may indicate a serious mail system misconfiguration somewhere. Please contact owner@bugs.debian.org immediately.) -- 1110051: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=1110051 Debian Bug Tracking System Contact owner@bugs.debian.org with problems
--- Begin Message ---
- To: Debian Bug Tracking System <submit@bugs.debian.org>
- Subject: unblock: starlette/0.46.1-3
- From: Piotr Ożarowski <piotr@debian.org>
- Date: Mon, 28 Jul 2025 21:24:35 +0200
- Message-id: <[🔎] 4lvgsqsdbavwymqqtt2jmhujvkwr5iny52aadmsryh3dhbmxcf@kezq4al2wrmc>
Package: release.debian.org Severity: normal X-Debbugs-Cc: starlette@packages.debian.org Control: affects -1 + src:starlette User: release.debian.org@packages.debian.org Usertags: unblock Please unblock package starlette This upload fixes CVE-2025-54121. The fix is taken from upstream repository¹ (also released as 0.47.2). I'm attaching 0002-fix-cve-2024-28849-async-write.patch. More than half of this patch contains tests for the fix. The debdiff with changes from package in testing contains also wrongly indented (sorry, fixed in git already) debian/changelog and cosmetic changes in an older patch (made by `gbp pq export` and not touching the source code). unblock starlette/0.46.1-3 [¹] https://github.com/encode/starlette/commit/9f7ec2eb512fcc3fe90b43cb9dd9e1d08696bec1

From: Yang Wang <yang.wang@windriver.com> Date: Mon, 28 Jul 2025 11:41:09 +0200 Subject: fix-cve-2024-28849-async-write Fix CVE-2025-54121: Avoid event loop blocking during multipart file uploads by writing to disk using thread pool to prevent synchronous blocking when SpooledTemporaryFile rolls over to disk. (Closes: #1109805) --- starlette/datastructures.py | 22 +++++++++++++--- tests/test_formparsers.py | 63 ++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 80 insertions(+), 5 deletions(-) diff --git a/starlette/datastructures.py b/starlette/datastructures.py index f5d74d2..9957090 100644 --- a/starlette/datastructures.py +++ b/starlette/datastructures.py @@ -424,6 +424,10 @@ class UploadFile: self.size = size self.headers = headers or Headers() + # Capture max size from SpooledTemporaryFile if one is provided. This slightly speeds up future checks. 
+ # Note 0 means unlimited mirroring SpooledTemporaryFile's __init__ + self._max_mem_size = getattr(self.file, "_max_size", 0) + @property def content_type(self) -> str | None: return self.headers.get("content-type", None) @@ -434,14 +438,24 @@ class UploadFile: rolled_to_disk = getattr(self.file, "_rolled", True) return not rolled_to_disk + def _will_roll(self, size_to_add: int) -> bool: + # If we're not in_memory then we will always roll + if not self._in_memory: + return True + + # Check for SpooledTemporaryFile._max_size + future_size = self.file.tell() + size_to_add + return bool(future_size > self._max_mem_size) if self._max_mem_size else False + async def write(self, data: bytes) -> None: + new_data_len = len(data) if self.size is not None: - self.size += len(data) + self.size += new_data_len - if self._in_memory: - self.file.write(data) - else: + if self._will_roll(new_data_len): await run_in_threadpool(self.file.write, data) + else: + self.file.write(data) async def read(self, size: int = -1) -> bytes: if self._in_memory: diff --git a/tests/test_formparsers.py b/tests/test_formparsers.py index b18fd6c..63577d6 100644 --- a/tests/test_formparsers.py +++ b/tests/test_formparsers.py @@ -1,15 +1,20 @@ from __future__ import annotations import os +import threading +from collections.abc import Generator import typing from contextlib import nullcontext as does_not_raise +from io import BytesIO from pathlib import Path +from tempfile import SpooledTemporaryFile +from unittest import mock import pytest from starlette.applications import Starlette from starlette.datastructures import UploadFile -from starlette.formparsers import MultiPartException, _user_safe_decode +from starlette.formparsers import MultiPartException, MultiPartParser, _user_safe_decode from starlette.requests import Request from starlette.responses import JSONResponse from starlette.routing import Mount @@ -104,6 +109,22 @@ async def app_read_body(scope: Scope, receive: Receive, send: Send) -> 
None: await response(scope, receive, send) +async def app_monitor_thread(scope: Scope, receive: Receive, send: Send) -> None: + """Helper app to monitor what thread the app was called on. + + This can later be used to validate thread/event loop operations. + """ + request = Request(scope, receive) + + # Make sure we parse the form + await request.form() + await request.close() + + # Send back the current thread id + response = JSONResponse({"thread_ident": threading.current_thread().ident}) + await response(scope, receive, send) + + def make_app_max_parts(max_files: int = 1000, max_fields: int = 1000, max_part_size: int = 1024 * 1024) -> ASGIApp: async def app(scope: Scope, receive: Receive, send: Send) -> None: request = Request(scope, receive) @@ -302,6 +323,46 @@ def test_multipart_request_mixed_files_and_data(tmpdir: Path, test_client_factor "field1": "value1", } +class ThreadTrackingSpooledTemporaryFile(SpooledTemporaryFile[bytes]): + """Helper class to track which threads performed the rollover operation. + + This is not threadsafe/multi-test safe. 
+ """ + + rollover_threads: typing.ClassVar[set[int | None]] = set() + + def rollover(self) -> None: + ThreadTrackingSpooledTemporaryFile.rollover_threads.add(threading.current_thread().ident) + super().rollover() + + +@pytest.fixture +def mock_spooled_temporary_file() -> Generator[None]: + try: + with mock.patch("starlette.formparsers.SpooledTemporaryFile", ThreadTrackingSpooledTemporaryFile): + yield + finally: + ThreadTrackingSpooledTemporaryFile.rollover_threads.clear() + + +def test_multipart_request_large_file_rollover_in_background_thread( + mock_spooled_temporary_file: None, test_client_factory: TestClientFactory +) -> None: + """Test that Spooled file rollovers happen in background threads.""" + data = BytesIO(b" " * (MultiPartParser.spool_max_size + 1)) + + client = test_client_factory(app_monitor_thread) + response = client.post("/", files=[("test_large", data)]) + assert response.status_code == 200 + + # Parse the event thread id from the API response and ensure we have one + app_thread_ident = response.json().get("thread_ident") + assert app_thread_ident is not None + + # Ensure the app thread was not the same as the rollover one and that a rollover thread exists + assert app_thread_ident not in ThreadTrackingSpooledTemporaryFile.rollover_threads + assert len(ThreadTrackingSpooledTemporaryFile.rollover_threads) == 1 + + def test_multipart_request_with_charset_for_filename(tmpdir: Path, test_client_factory: TestClientFactory) -> None: + client = test_client_factory(app)

Attachment: signature.asc
Description: PGP signature
--- End Message ---
--- Begin Message ---
- To: 1110051-done@bugs.debian.org
- Subject: unblock starlette
- From: Sebastian Ramacher <sramacher@respighi.debian.org>
- Date: Mon, 28 Jul 2025 20:55:04 +0000
- Message-id: <E1ugUsC-001Dzr-1t@respighi.debian.org>
Unblocked.
--- End Message ---