From 85eda9b6cfaa01e2c614a1b958d9bfcc8bf2fe03 Mon Sep 17 00:00:00 2001
From: Sasha Levin <sashal@kernel.org>
Date: Tue, 9 Dec 2025 03:19:45 +0100
Subject: ARM: 9464/1: fix input-only operand modification in
load_unaligned_zeropad()
From: Liyuan Pang <pangliyuan1@huawei.com>
[ Upstream commit edb924a7211c9aa7a4a415e03caee4d875e46b8e ]
In the inline assembly inside load_unaligned_zeropad(), "addr" is
constrained as an input-only operand. The compiler assumes that on exit
from the asm statement such operands contain the same values they held
before the statement executed, but when a kernel page fault happens,
the fixup code "bic %2, %2, #0x3" modifies the value of "addr", which
may lead to unexpected behavior.

Use a temporary variable "tmp" instead of modifying the input-only
operand, just like arm64's load_unaligned_zeropad() does.
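
To illustrate the constraint rule, here is a minimal sketch (not part of
the upstream commit), assuming GCC extended asm on 32-bit ARM; the
function names broken_load() and fixed_load() are hypothetical:

static inline unsigned long broken_load(const void *addr)
{
	unsigned long ret;

	/* BROKEN: "bic" writes to %1, an input-only operand. The
	 * compiler is free to assume addr is unchanged afterwards. */
	asm("bic	%1, %1, #0x3\n"
	"	ldr	%0, [%1]"
	: "=&r" (ret)
	: "r" (addr));
	return ret;
}

static inline unsigned long fixed_load(const void *addr)
{
	unsigned long ret, tmp;

	/* OK: the aligned address lands in the scratch output "tmp";
	 * the input operand "addr" is never written. */
	asm("bic	%1, %2, #0x3\n"
	"	ldr	%0, [%1]"
	: "=&r" (ret), "=&r" (tmp)
	: "r" (addr));
	return ret;
}

The earlyclobber "&" on "tmp" matters here: the scratch register is
written before the last read of the input operand, so the compiler must
not allocate them to the same register.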
Fixes: b9a50f74905a ("ARM: 7450/1: dcache: select DCACHE_WORD_ACCESS for little-endian ARMv6+ CPUs")
Co-developed-by: Xie Yuanbin <xieyuanbin1@huawei.com>
Signed-off-by: Xie Yuanbin <xieyuanbin1@huawei.com>
Signed-off-by: Liyuan Pang <pangliyuan1@huawei.com>
Signed-off-by: Russell King (Oracle) <rmk+kernel@armlinux.org.uk>
Signed-off-by: Sasha Levin <sashal@kernel.org>
---
arch/arm/include/asm/word-at-a-time.h | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
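
As a sketch of what the reordered fixup sequence computes, here is a C
equivalent for the little-endian case (the helper name
zeropad_fault_path() is hypothetical, not kernel code):

unsigned long zeropad_fault_path(unsigned long addr)
{
	unsigned long aligned = addr & ~0x3UL;          /* bic %1, %2, #0x3 */
	unsigned long word = *(unsigned long *)aligned; /* ldr %0, [%1]     */
	unsigned long shift = (addr & 0x3) * 8;         /* and + lsl        */
	return word >> shift;                           /* lsr (LE)         */
}

The right shift discards the bytes before the faulting address and
zero-fills the top, which is exactly the "zeropad" behavior callers of
load_unaligned_zeropad() rely on.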
diff --git a/arch/arm/include/asm/word-at-a-time.h b/arch/arm/include/asm/word-at-a-time.h
index 352ab213520d2..2e6d0b4349f47 100644
--- a/arch/arm/include/asm/word-at-a-time.h
+++ b/arch/arm/include/asm/word-at-a-time.h
@@ -66,7 +66,7 @@ static inline unsigned long find_zero(unsigned long mask)
*/
static inline unsigned long load_unaligned_zeropad(const void *addr)
{
- unsigned long ret, offset;
+ unsigned long ret, tmp;
/* Load word from unaligned pointer addr */
asm(
@@ -74,9 +74,9 @@ static inline unsigned long load_unaligned_zeropad(const void *addr)
"2:\n"
" .pushsection .text.fixup,\"ax\"\n"
" .align 2\n"
- "3: and %1, %2, #0x3\n"
- " bic %2, %2, #0x3\n"
- " ldr %0, [%2]\n"
+ "3: bic %1, %2, #0x3\n"
+ " ldr %0, [%1]\n"
+ " and %1, %2, #0x3\n"
" lsl %1, %1, #0x3\n"
#ifndef __ARMEB__
" lsr %0, %0, %1\n"
@@ -89,7 +89,7 @@ static inline unsigned long load_unaligned_zeropad(const void *addr)
" .align 3\n"
" .long 1b, 3b\n"
" .popsection"
- : "=&r" (ret), "=&r" (offset)
+ : "=&r" (ret), "=&r" (tmp)
: "r" (addr), "Qo" (*(unsigned long *)addr));
return ret;
--
2.51.0