Commit 97e9561471 for openssl.org
commit 97e9561471057d6ffe4db098753208b6cad92ece
Author: Liu-ErMeng <liuermeng2@huawei.com>
Date: Fri Mar 13 02:29:28 2026 -0700
Fix vpsm4_ex-armv8.pl implementation bug
Load the .Lsbox_magic base address once via adrp+add and use plain immediate offsets for the q-register loads,
avoiding potential low-12-bit truncation issues with #:lo12:symbol+offset.
Reviewed-by: Paul Dale <paul.dale@oracle.com>
Reviewed-by: Tomas Mraz <tomas@openssl.foundation>
MergeDate: Tue Mar 24 17:36:51 2026
(Merged from https://github.com/openssl/openssl/pull/30410)
diff --git a/crypto/sm4/asm/vpsm4_ex-armv8.pl b/crypto/sm4/asm/vpsm4_ex-armv8.pl
index e8e3373585..eac5a0f6a1 100644
--- a/crypto/sm4/asm/vpsm4_ex-armv8.pl
+++ b/crypto/sm4/asm/vpsm4_ex-armv8.pl
@@ -476,12 +476,13 @@ sub load_sbox () {
$code.=<<___;
adrp $xtmp2, .Lsbox_magic
- ldr $MaskQ, [$xtmp2, #:lo12:.Lsbox_magic]
- ldr $TAHMatQ, [$xtmp2, #:lo12:.Lsbox_magic+16]
- ldr $TALMatQ, [$xtmp2, #:lo12:.Lsbox_magic+32]
- ldr $ATAHMatQ, [$xtmp2, #:lo12:.Lsbox_magic+48]
- ldr $ATALMatQ, [$xtmp2, #:lo12:.Lsbox_magic+64]
- ldr $ANDMaskQ, [$xtmp2, #:lo12:.Lsbox_magic+80]
+ add $xtmp2, $xtmp2, #:lo12:.Lsbox_magic
+ ldr $MaskQ, [$xtmp2]
+ ldr $TAHMatQ, [$xtmp2, 16]
+ ldr $TALMatQ, [$xtmp2, 32]
+ ldr $ATAHMatQ, [$xtmp2, 48]
+ ldr $ATALMatQ, [$xtmp2, 64]
+ ldr $ANDMaskQ, [$xtmp2, 80]
___
}