mirror of
https://github.com/openembedded/meta-openembedded.git
synced 2026-04-10 20:28:17 +00:00
python3-aiohttp: patch CVE-2025-53643
Details: https://nvd.nist.gov/vuln/detail/CVE-2025-53643

Dropped changes to the test and changelog from the original commit.

Signed-off-by: Ankur Tyagi <ankur.tyagi85@gmail.com>
Signed-off-by: Anuj Mittal <anuj.mittal@oss.qualcomm.com>
This commit is contained in:
@@ -0,0 +1,192 @@
|
||||
From 2b45c0cc5f94a4aab25e80580db73c5da1152030 Mon Sep 17 00:00:00 2001
|
||||
From: Sam Bull <git@sambull.org>
|
||||
Date: Wed, 9 Jul 2025 19:55:22 +0100
|
||||
Subject: [PATCH] Add trailer parsing logic (#11269) (#11287)
|
||||
|
||||
CVE: CVE-2025-53643
|
||||
Upstream-Status: Backport [https://github.com/aio-libs/aiohttp/commit/e8d774f635dc6d1cd3174d0e38891da5de0e2b6a]
|
||||
|
||||
Dropped changes to the test and changelog from the original commit.
|
||||
|
||||
Signed-off-by: Ankur Tyagi <ankur.tyagi85@gmail.com>
|
||||
---
|
||||
aiohttp/http_parser.py | 70 ++++++++++++++++++++++--------------------
|
||||
aiohttp/multipart.py | 2 +-
|
||||
2 files changed, 38 insertions(+), 34 deletions(-)
|
||||
|
||||
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
|
||||
index 7a552458e..0a80c5c6d 100644
|
||||
--- a/aiohttp/http_parser.py
|
||||
+++ b/aiohttp/http_parser.py
|
||||
@@ -142,8 +142,8 @@ class HeadersParser:
|
||||
# note: "raw" does not mean inclusion of OWS before/after the field value
|
||||
raw_headers = []
|
||||
|
||||
- lines_idx = 1
|
||||
- line = lines[1]
|
||||
+ lines_idx = 0
|
||||
+ line = lines[lines_idx]
|
||||
line_count = len(lines)
|
||||
|
||||
while line:
|
||||
@@ -397,6 +397,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
|
||||
response_with_body=self.response_with_body,
|
||||
auto_decompress=self._auto_decompress,
|
||||
lax=self.lax,
|
||||
+ headers_parser=self._headers_parser,
|
||||
)
|
||||
if not payload_parser.done:
|
||||
self._payload_parser = payload_parser
|
||||
@@ -416,6 +417,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
|
||||
readall=True,
|
||||
auto_decompress=self._auto_decompress,
|
||||
lax=self.lax,
|
||||
+ headers_parser=self._headers_parser,
|
||||
)
|
||||
elif not empty_body and length is None and self.read_until_eof:
|
||||
payload = StreamReader(
|
||||
@@ -435,6 +437,7 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
|
||||
response_with_body=self.response_with_body,
|
||||
auto_decompress=self._auto_decompress,
|
||||
lax=self.lax,
|
||||
+ headers_parser=self._headers_parser,
|
||||
)
|
||||
if not payload_parser.done:
|
||||
self._payload_parser = payload_parser
|
||||
@@ -471,6 +474,10 @@ class HttpParser(abc.ABC, Generic[_MsgT]):
|
||||
|
||||
eof = True
|
||||
data = b""
|
||||
+ if isinstance(
|
||||
+ underlying_exc, (InvalidHeader, TransferEncodingError)
|
||||
+ ):
|
||||
+ raise
|
||||
|
||||
if eof:
|
||||
start_pos = 0
|
||||
@@ -635,7 +642,7 @@ class HttpRequestParser(HttpParser[RawRequestMessage]):
|
||||
compression,
|
||||
upgrade,
|
||||
chunked,
|
||||
- ) = self.parse_headers(lines)
|
||||
+ ) = self.parse_headers(lines[1:])
|
||||
|
||||
if close is None: # then the headers weren't set in the request
|
||||
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
|
||||
@@ -715,7 +722,7 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
|
||||
compression,
|
||||
upgrade,
|
||||
chunked,
|
||||
- ) = self.parse_headers(lines)
|
||||
+ ) = self.parse_headers(lines[1:])
|
||||
|
||||
if close is None:
|
||||
if version_o <= HttpVersion10:
|
||||
@@ -755,6 +762,8 @@ class HttpPayloadParser:
|
||||
response_with_body: bool = True,
|
||||
auto_decompress: bool = True,
|
||||
lax: bool = False,
|
||||
+ *,
|
||||
+ headers_parser: HeadersParser,
|
||||
) -> None:
|
||||
self._length = 0
|
||||
self._type = ParseState.PARSE_NONE
|
||||
@@ -763,6 +772,8 @@ class HttpPayloadParser:
|
||||
self._chunk_tail = b""
|
||||
self._auto_decompress = auto_decompress
|
||||
self._lax = lax
|
||||
+ self._headers_parser = headers_parser
|
||||
+ self._trailer_lines: list[bytes] = []
|
||||
self.done = False
|
||||
|
||||
# payload decompression wrapper
|
||||
@@ -850,7 +861,7 @@ class HttpPayloadParser:
|
||||
size_b = chunk[:i] # strip chunk-extensions
|
||||
# Verify no LF in the chunk-extension
|
||||
if b"\n" in (ext := chunk[i:pos]):
|
||||
- exc = BadHttpMessage(
|
||||
+ exc = TransferEncodingError(
|
||||
f"Unexpected LF in chunk-extension: {ext!r}"
|
||||
)
|
||||
set_exception(self.payload, exc)
|
||||
@@ -871,7 +882,7 @@ class HttpPayloadParser:
|
||||
|
||||
chunk = chunk[pos + len(SEP) :]
|
||||
if size == 0: # eof marker
|
||||
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
|
||||
+ self._chunk = ChunkState.PARSE_TRAILERS
|
||||
if self._lax and chunk.startswith(b"\r"):
|
||||
chunk = chunk[1:]
|
||||
else:
|
||||
@@ -909,38 +920,31 @@ class HttpPayloadParser:
|
||||
self._chunk_tail = chunk
|
||||
return False, b""
|
||||
|
||||
- # if stream does not contain trailer, after 0\r\n
|
||||
- # we should get another \r\n otherwise
|
||||
- # trailers needs to be skipped until \r\n\r\n
|
||||
- if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
|
||||
- head = chunk[: len(SEP)]
|
||||
- if head == SEP:
|
||||
- # end of stream
|
||||
- self.payload.feed_eof()
|
||||
- return True, chunk[len(SEP) :]
|
||||
- # Both CR and LF, or only LF may not be received yet. It is
|
||||
- # expected that CRLF or LF will be shown at the very first
|
||||
- # byte next time, otherwise trailers should come. The last
|
||||
- # CRLF which marks the end of response might not be
|
||||
- # contained in the same TCP segment which delivered the
|
||||
- # size indicator.
|
||||
- if not head:
|
||||
- return False, b""
|
||||
- if head == SEP[:1]:
|
||||
- self._chunk_tail = head
|
||||
- return False, b""
|
||||
- self._chunk = ChunkState.PARSE_TRAILERS
|
||||
-
|
||||
- # read and discard trailer up to the CRLF terminator
|
||||
if self._chunk == ChunkState.PARSE_TRAILERS:
|
||||
pos = chunk.find(SEP)
|
||||
- if pos >= 0:
|
||||
- chunk = chunk[pos + len(SEP) :]
|
||||
- self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
|
||||
- else:
|
||||
+ if pos < 0: # No line found
|
||||
self._chunk_tail = chunk
|
||||
return False, b""
|
||||
|
||||
+ line = chunk[:pos]
|
||||
+ chunk = chunk[pos + len(SEP) :]
|
||||
+ if SEP == b"\n": # For lax response parsing
|
||||
+ line = line.rstrip(b"\r")
|
||||
+ self._trailer_lines.append(line)
|
||||
+
|
||||
+ # \r\n\r\n found, end of stream
|
||||
+ if self._trailer_lines[-1] == b"":
|
||||
+ # Headers and trailers are defined the same way,
|
||||
+ # so we reuse the HeadersParser here.
|
||||
+ try:
|
||||
+ trailers, raw_trailers = self._headers_parser.parse_headers(
|
||||
+ self._trailer_lines
|
||||
+ )
|
||||
+ finally:
|
||||
+ self._trailer_lines.clear()
|
||||
+ self.payload.feed_eof()
|
||||
+ return True, chunk
|
||||
+
|
||||
# Read all bytes until eof
|
||||
elif self._type == ParseState.PARSE_UNTIL_EOF:
|
||||
self.payload.feed_data(chunk, len(chunk))
|
||||
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
|
||||
index 71fc2654a..520ee539e 100644
|
||||
--- a/aiohttp/multipart.py
|
||||
+++ b/aiohttp/multipart.py
|
||||
@@ -723,7 +723,7 @@ class MultipartReader:
|
||||
raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")
|
||||
|
||||
async def _read_headers(self) -> "CIMultiDictProxy[str]":
|
||||
- lines = [b""]
|
||||
+ lines = []
|
||||
while True:
|
||||
chunk = await self._content.readline()
|
||||
chunk = chunk.strip()
|
||||
@@ -6,7 +6,9 @@ LIC_FILES_CHKSUM = "file://LICENSE.txt;md5=748073912af33aa59430d3702aa32d41"
|
||||
|
||||
SRC_URI[sha256sum] = "edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"
|
||||
|
||||
-SRC_URI += "file://CVE-2024-52304.patch"
|
||||
+SRC_URI += "file://CVE-2024-52304.patch \
|
||||
+            file://CVE-2025-53643.patch \
|
||||
+           "
|
||||
|
||||
PYPI_PACKAGE = "aiohttp"
|
||||
inherit python_setuptools_build_meta pypi
|
||||
|
||||
Reference in New Issue
Block a user