aboutsummaryrefslogtreecommitdiff
path: root/string/arm
diff options
context:
space:
mode:
Diffstat (limited to 'string/arm')
-rw-r--r--string/arm/asmdefs.h477
-rw-r--r--string/arm/check-arch.S7
-rw-r--r--string/arm/memchr.S46
-rw-r--r--string/arm/memcpy.S6
-rw-r--r--string/arm/memset.S2
-rw-r--r--string/arm/strcmp-armv6m.S6
-rw-r--r--string/arm/strcmp.S59
-rw-r--r--string/arm/strcpy.c2
-rw-r--r--string/arm/strlen-armv6t2.S18
9 files changed, 579 insertions, 44 deletions
diff --git a/string/arm/asmdefs.h b/string/arm/asmdefs.h
new file mode 100644
index 0000000..e311888
--- /dev/null
+++ b/string/arm/asmdefs.h
@@ -0,0 +1,477 @@
+/*
+ * Macros for asm code. Arm version.
+ *
+ * Copyright (c) 2019-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
+ */
+
+#ifndef _ASMDEFS_H
+#define _ASMDEFS_H
+
+/* Check whether leaf function PAC signing has been requested in the
+ -mbranch-protection compile-time option. */
+#define LEAF_PROTECT_BIT 2
+
+#ifdef __ARM_FEATURE_PAC_DEFAULT
+# define HAVE_PAC_LEAF \
+ ((__ARM_FEATURE_PAC_DEFAULT & (1 << LEAF_PROTECT_BIT)) && 1)
+#else
+# define HAVE_PAC_LEAF 0
+#endif
+
+/* Provide default parameters for PAC-code handling in leaf-functions. */
+#if HAVE_PAC_LEAF
+# ifndef PAC_LEAF_PUSH_IP
+# define PAC_LEAF_PUSH_IP 1
+# endif
+#else /* !HAVE_PAC_LEAF */
+# undef PAC_LEAF_PUSH_IP
+# define PAC_LEAF_PUSH_IP 0
+#endif /* HAVE_PAC_LEAF */
+
+#define STACK_ALIGN_ENFORCE 0
+
+/******************************************************************************
+* Implementation of the prologue and epilogue assembler macros and their
+* associated helper functions.
+*
+* These functions add support for the following:
+*
+* - M-profile branch target identification (BTI) landing-pads when compiled
+* with `-mbranch-protection=bti'.
+* - PAC-signing and verification instructions, depending on hardware support
+* and whether the PAC-signing of leaf functions has been requested via the
+* `-mbranch-protection=pac-ret+leaf' compiler argument.
+* - 8-byte stack alignment preservation at function entry, defaulting to the
+* value of STACK_ALIGN_ENFORCE.
+*
+* Notes:
+* - Prologue stack alignment is implemented by detecting a push with an odd
+* number of registers and prepending a dummy register to the list.
+* - If alignment is attempted on a list containing r0, compilation will result
+* in an error.
+* - If alignment is attempted in a list containing r1, r0 will be prepended to
+* the register list and r0 will be restored prior to function return. For
+* functions with non-void return types, this will result in the corruption of
+* the result register.
+* - Stack alignment is enforced via the following helper macro call-chain:
+*
+* {prologue|epilogue} ->_align8 -> _preprocess_reglist ->
+* _preprocess_reglist1 -> {_prologue|_epilogue}
+*
+* - Debug CFI directives are automatically added to prologues and epilogues,
+* assisted by `cfisavelist' and `cfirestorelist', respectively.
+*
+* Arguments:
+* prologue
+* --------
+* - first - If `last' specified, this serves as start of general-purpose
+* register (GPR) range to push onto stack, otherwise represents
+* single GPR to push onto stack. If omitted, no GPRs pushed
+* onto stack at prologue.
+* - last - If given, specifies inclusive upper-bound of GPR range.
+* - push_ip - Determines whether IP register is to be pushed to stack at
+* prologue. When pac-signing is requested, this holds the
+* pac-key. Either 1 or 0 to push or not push, respectively.
+* Default behavior: Set to value of PAC_LEAF_PUSH_IP macro.
+* - push_lr - Determines whether to push lr to the stack on function entry.
+* Either 1 or 0 to push or not push, respectively.
+* - align8 - Whether to enforce alignment. Either 1 or 0, with 1 requesting
+* alignment.
+*
+* epilogue
+* --------
+* The epilogue should be called passing the same arguments as those passed to
+* the prologue to ensure the stack is not corrupted on function return.
+*
+* Usage examples:
+*
+* prologue push_ip=1 -> push {ip}
+* epilogue push_ip=1, align8=1 -> pop {r2, ip}
+* prologue push_ip=1, push_lr=1 -> push {ip, lr}
+* epilogue 1 -> pop {r1}
+* prologue 1, align8=1 -> push {r0, r1}
+* epilogue 1, push_ip=1 -> pop {r1, ip}
+* prologue 1, 4 -> push {r1-r4}
+* epilogue 1, 4 push_ip=1 -> pop {r1-r4, ip}
+*
+******************************************************************************/
+
+/* Emit .cfi_restore directives for a consecutive sequence of registers. */
+ .macro cfirestorelist first, last
+ .cfi_restore \last
+ .if \last-\first
+ cfirestorelist \first, \last-1
+ .endif
+ .endm
+
+/* Emit .cfi_offset directives for a consecutive sequence of registers. */
+ .macro cfisavelist first, last, index=1
+ .cfi_offset \last, -4*(\index)
+ .if \last-\first
+ cfisavelist \first, \last-1, \index+1
+ .endif
+ .endm
+
+.macro _prologue first=-1, last=-1, push_ip=PAC_LEAF_PUSH_IP, push_lr=0
+ .if \push_ip & 1 != \push_ip
+ .error "push_ip may be either 0 or 1"
+ .endif
+ .if \push_lr & 1 != \push_lr
+ .error "push_lr may be either 0 or 1"
+ .endif
+ .if \first != -1
+ .if \last == -1
+ /* Upper-bound not provided: Set upper = lower. */
+ _prologue \first, \first, \push_ip, \push_lr
+ .exitm
+ .endif
+ .endif
+#if HAVE_PAC_LEAF
+# if __ARM_FEATURE_BTI_DEFAULT
+ pacbti ip, lr, sp
+# else
+ pac ip, lr, sp
+# endif /* __ARM_FEATURE_BTI_DEFAULT */
+ .cfi_register 143, 12
+#else
+# if __ARM_FEATURE_BTI_DEFAULT
+ bti
+# endif /* __ARM_FEATURE_BTI_DEFAULT */
+#endif /* HAVE_PAC_LEAF */
+ .if \first != -1
+ .if \last != \first
+ .if \last >= 13
+ .error "SP cannot be in the save list"
+ .endif
+ .if \push_ip
+ .if \push_lr
+ /* Case 1: push register range, ip and lr registers. */
+ push {r\first-r\last, ip, lr}
+ .cfi_adjust_cfa_offset ((\last-\first)+3)*4
+ .cfi_offset 14, -4
+ .cfi_offset 143, -8
+ cfisavelist \first, \last, 3
+ .else // !\push_lr
+ /* Case 2: push register range and ip register. */
+ push {r\first-r\last, ip}
+ .cfi_adjust_cfa_offset ((\last-\first)+2)*4
+ .cfi_offset 143, -4
+ cfisavelist \first, \last, 2
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 3: push register range and lr register. */
+ push {r\first-r\last, lr}
+ .cfi_adjust_cfa_offset ((\last-\first)+2)*4
+ .cfi_offset 14, -4
+ cfisavelist \first, \last, 2
+ .else // !\push_lr
+ /* Case 4: push register range. */
+ push {r\first-r\last}
+ .cfi_adjust_cfa_offset ((\last-\first)+1)*4
+ cfisavelist \first, \last, 1
+ .endif
+ .endif
+ .else // \last == \first
+ .if \push_ip
+ .if \push_lr
+ /* Case 5: push single GP register plus ip and lr registers. */
+ push {r\first, ip, lr}
+ .cfi_adjust_cfa_offset 12
+ .cfi_offset 14, -4
+ .cfi_offset 143, -8
+ cfisavelist \first, \first, 3
+ .else // !\push_lr
+ /* Case 6: push single GP register plus ip register. */
+ push {r\first, ip}
+ .cfi_adjust_cfa_offset 8
+ .cfi_offset 143, -4
+ cfisavelist \first, \first, 2
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 7: push single GP register plus lr register. */
+ push {r\first, lr}
+ .cfi_adjust_cfa_offset 8
+ .cfi_offset 14, -4
+ cfisavelist \first, \first, 2
+ .else // !\push_lr
+ /* Case 8: push single GP register. */
+ push {r\first}
+ .cfi_adjust_cfa_offset 4
+ cfisavelist \first, \first, 1
+ .endif
+ .endif
+ .endif
+ .else // \first == -1
+ .if \push_ip
+ .if \push_lr
+ /* Case 9: push ip and lr registers. */
+ push {ip, lr}
+ .cfi_adjust_cfa_offset 8
+ .cfi_offset 14, -4
+ .cfi_offset 143, -8
+ .else // !\push_lr
+ /* Case 10: push ip register. */
+ push {ip}
+ .cfi_adjust_cfa_offset 4
+ .cfi_offset 143, -4
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 11: push lr register. */
+ push {lr}
+ .cfi_adjust_cfa_offset 4
+ .cfi_offset 14, -4
+ .endif
+ .endif
+ .endif
+.endm
+
+.macro _epilogue first=-1, last=-1, push_ip=PAC_LEAF_PUSH_IP, push_lr=0
+ .if \push_ip & 1 != \push_ip
+ .error "push_ip may be either 0 or 1"
+ .endif
+ .if \push_lr & 1 != \push_lr
+ .error "push_lr may be either 0 or 1"
+ .endif
+ .if \first != -1
+ .if \last == -1
+ /* Upper-bound not provided: Set upper = lower. */
+ _epilogue \first, \first, \push_ip, \push_lr
+ .exitm
+ .endif
+ .if \last != \first
+ .if \last >= 13
+ .error "SP cannot be in the save list"
+ .endif
+ .if \push_ip
+ .if \push_lr
+ /* Case 1: pop register range, ip and lr registers. */
+ pop {r\first-r\last, ip, lr}
+ .cfi_restore 14
+ .cfi_register 143, 12
+ cfirestorelist \first, \last
+ .else // !\push_lr
+ /* Case 2: pop register range and ip register. */
+ pop {r\first-r\last, ip}
+ .cfi_register 143, 12
+ cfirestorelist \first, \last
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 3: pop register range and lr register. */
+ pop {r\first-r\last, lr}
+ .cfi_restore 14
+ cfirestorelist \first, \last
+ .else // !\push_lr
+ /* Case 4: pop register range. */
+ pop {r\first-r\last}
+ cfirestorelist \first, \last
+ .endif
+ .endif
+ .else // \last == \first
+ .if \push_ip
+ .if \push_lr
+ /* Case 5: pop single GP register plus ip and lr registers. */
+ pop {r\first, ip, lr}
+ .cfi_restore 14
+ .cfi_register 143, 12
+ cfirestorelist \first, \first
+ .else // !\push_lr
+ /* Case 6: pop single GP register plus ip register. */
+ pop {r\first, ip}
+ .cfi_register 143, 12
+ cfirestorelist \first, \first
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 7: pop single GP register plus lr register. */
+ pop {r\first, lr}
+ .cfi_restore 14
+ cfirestorelist \first, \first
+ .else // !\push_lr
+ /* Case 8: pop single GP register. */
+ pop {r\first}
+ cfirestorelist \first, \first
+ .endif
+ .endif
+ .endif
+ .else // \first == -1
+ .if \push_ip
+ .if \push_lr
+ /* Case 9: pop ip and lr registers. */
+ pop {ip, lr}
+ .cfi_restore 14
+ .cfi_register 143, 12
+ .else // !\push_lr
+ /* Case 10: pop ip register. */
+ pop {ip}
+ .cfi_register 143, 12
+ .endif
+ .else // !\push_ip
+ .if \push_lr
+ /* Case 11: pop lr register. */
+ pop {lr}
+ .cfi_restore 14
+ .endif
+ .endif
+ .endif
+#if HAVE_PAC_LEAF
+ aut ip, lr, sp
+#endif /* HAVE_PAC_LEAF */
+ bx lr
+.endm
+
+/* Clean up expressions in 'last'. */
+.macro _preprocess_reglist1 first:req, last:req, push_ip:req, push_lr:req, reglist_op:req
+ .if \last == 0
+ \reglist_op \first, 0, \push_ip, \push_lr
+ .elseif \last == 1
+ \reglist_op \first, 1, \push_ip, \push_lr
+ .elseif \last == 2
+ \reglist_op \first, 2, \push_ip, \push_lr
+ .elseif \last == 3
+ \reglist_op \first, 3, \push_ip, \push_lr
+ .elseif \last == 4
+ \reglist_op \first, 4, \push_ip, \push_lr
+ .elseif \last == 5
+ \reglist_op \first, 5, \push_ip, \push_lr
+ .elseif \last == 6
+ \reglist_op \first, 6, \push_ip, \push_lr
+ .elseif \last == 7
+ \reglist_op \first, 7, \push_ip, \push_lr
+ .elseif \last == 8
+ \reglist_op \first, 8, \push_ip, \push_lr
+ .elseif \last == 9
+ \reglist_op \first, 9, \push_ip, \push_lr
+ .elseif \last == 10
+ \reglist_op \first, 10, \push_ip, \push_lr
+ .elseif \last == 11
+ \reglist_op \first, 11, \push_ip, \push_lr
+ .else
+ .error "last (\last) out of range"
+ .endif
+.endm
+
+/* Clean up expressions in 'first'. */
+.macro _preprocess_reglist first:req, last, push_ip=0, push_lr=0, reglist_op:req
+ .ifb \last
+ _preprocess_reglist \first \first \push_ip \push_lr \reglist_op
+ .else
+ .if \first > \last
+ .error "last (\last) must be at least as great as first (\first)"
+ .endif
+ .if \first == 0
+ _preprocess_reglist1 0, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 1
+ _preprocess_reglist1 1, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 2
+ _preprocess_reglist1 2, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 3
+ _preprocess_reglist1 3, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 4
+ _preprocess_reglist1 4, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 5
+ _preprocess_reglist1 5, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 6
+ _preprocess_reglist1 6, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 7
+ _preprocess_reglist1 7, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 8
+ _preprocess_reglist1 8, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 9
+ _preprocess_reglist1 9, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 10
+ _preprocess_reglist1 10, \last, \push_ip, \push_lr, \reglist_op
+ .elseif \first == 11
+ _preprocess_reglist1 11, \last, \push_ip, \push_lr, \reglist_op
+ .else
+ .error "first (\first) out of range"
+ .endif
+ .endif
+.endm
+
+.macro _align8 first, last, push_ip=0, push_lr=0, reglist_op=_prologue
+ .ifb \first
+ .ifnb \last
+ .error "can't have last (\last) without specifying first"
+ .else // \last not blank
+ .if ((\push_ip + \push_lr) % 2) == 0
+ \reglist_op first=-1, last=-1, push_ip=\push_ip, push_lr=\push_lr
+ .exitm
+ .else // ((\push_ip + \push_lr) % 2) odd
+ _align8 2, 2, \push_ip, \push_lr, \reglist_op
+ .exitm
+ .endif // ((\push_ip + \push_lr) % 2) == 0
+ .endif // .ifnb \last
+ .endif // .ifb \first
+
+ .ifb \last
+ _align8 \first, \first, \push_ip, \push_lr, \reglist_op
+ .else
+ .if \push_ip & 1 <> \push_ip
+ .error "push_ip may be 0 or 1"
+ .endif
+ .if \push_lr & 1 <> \push_lr
+ .error "push_lr may be 0 or 1"
+ .endif
+ .ifeq (\last - \first + \push_ip + \push_lr) % 2
+ .if \first == 0
+ .error "Alignment required and first register is r0"
+ .exitm
+ .endif
+ _preprocess_reglist \first-1, \last, \push_ip, \push_lr, \reglist_op
+ .else
+ _preprocess_reglist \first \last, \push_ip, \push_lr, \reglist_op
+ .endif
+ .endif
+.endm
+
+.macro prologue first, last, push_ip=PAC_LEAF_PUSH_IP, push_lr=0, align8=STACK_ALIGN_ENFORCE
+ .if \align8
+ _align8 \first, \last, \push_ip, \push_lr, _prologue
+ .else
+ _prologue first=\first, last=\last, push_ip=\push_ip, push_lr=\push_lr
+ .endif
+.endm
+
+.macro epilogue first, last, push_ip=PAC_LEAF_PUSH_IP, push_lr=0, align8=STACK_ALIGN_ENFORCE
+ .if \align8
+ _align8 \first, \last, \push_ip, \push_lr, reglist_op=_epilogue
+ .else
+ _epilogue first=\first, last=\last, push_ip=\push_ip, push_lr=\push_lr
+ .endif
+.endm
+
+#define ENTRY_ALIGN(name, alignment) \
+ .global name; \
+ .type name,%function; \
+ .align alignment; \
+ name: \
+ .fnstart; \
+ .cfi_startproc;
+
+#define ENTRY(name) ENTRY_ALIGN(name, 6)
+
+#define ENTRY_ALIAS(name) \
+ .global name; \
+ .type name,%function; \
+ name:
+
+#if defined (IS_LEAF)
+# define END_UNWIND .cantunwind;
+#else
+# define END_UNWIND
+#endif
+
+#define END(name) \
+ .cfi_endproc; \
+ END_UNWIND \
+ .fnend; \
+ .size name, .-name;
+
+#define L(l) .L ## l
+
+#endif
diff --git a/string/arm/check-arch.S b/string/arm/check-arch.S
index 1cff934..9551671 100644
--- a/string/arm/check-arch.S
+++ b/string/arm/check-arch.S
@@ -1,10 +1,13 @@
/*
* check ARCH setting.
*
- * Copyright (c) 2020, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2020-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
#if !__arm__
# error ARCH setting does not match the compiler.
#endif
+
+/* For attributes that may affect ABI. */
+#include "asmdefs.h"
diff --git a/string/arm/memchr.S b/string/arm/memchr.S
index 3f1ac4d..823d601 100644
--- a/string/arm/memchr.S
+++ b/string/arm/memchr.S
@@ -1,8 +1,8 @@
/*
* memchr - scan memory for a character
*
- * Copyright (c) 2010-2021, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2010-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
/*
@@ -23,7 +23,11 @@
@ Removed unneeded cbz from align loop
.syntax unified
+#if __ARM_ARCH >= 8 && __ARM_ARCH_PROFILE == 'M'
+ /* keep config inherited from -march= */
+#else
.arch armv7-a
+#endif
@ this lets us check a flag in a 00/ff byte easily in either endianness
#ifdef __ARMEB__
@@ -32,6 +36,8 @@
#define CHARTSTMASK(c) 1<<(c*8)
#endif
.thumb
+#include "asmdefs.h"
+
@ ---------------------------------------------------------------------------
.thumb_func
@@ -39,11 +45,14 @@
.p2align 4,,15
.global __memchr_arm
.type __memchr_arm,%function
+ .fnstart
+ .cfi_startproc
__memchr_arm:
@ r0 = start of memory to scan
@ r1 = character to look for
@ r2 = length
@ returns r0 = pointer to character or NULL if not found
+ prologue
and r1,r1,#0xff @ Don't think we can trust the caller to actually pass a char
cmp r2,#16 @ If it's short don't bother with anything clever
@@ -64,6 +73,11 @@ __memchr_arm:
10:
@ At this point, we are aligned, we know we have at least 8 bytes to work with
push {r4,r5,r6,r7}
+ .cfi_adjust_cfa_offset 16
+ .cfi_rel_offset 4, 0
+ .cfi_rel_offset 5, 4
+ .cfi_rel_offset 6, 8
+ .cfi_rel_offset 7, 12
orr r1, r1, r1, lsl #8 @ expand the match word across to all bytes
orr r1, r1, r1, lsl #16
bic r4, r2, #7 @ Number of double words to work with
@@ -83,6 +97,11 @@ __memchr_arm:
bne 15b @ (Flags from the subs above) If not run out of bytes then go around again
pop {r4,r5,r6,r7}
+ .cfi_restore 7
+ .cfi_restore 6
+ .cfi_restore 5
+ .cfi_restore 4
+ .cfi_adjust_cfa_offset -16
and r1,r1,#0xff @ Get r1 back to a single character from the expansion above
and r2,r2,#7 @ Leave the count remaining as the number after the double words have been done
@@ -97,16 +116,25 @@ __memchr_arm:
bne 21b @ on r2 flags
40:
+ .cfi_remember_state
movs r0,#0 @ not found
- bx lr
+ epilogue
50:
+ .cfi_restore_state
+ .cfi_remember_state
subs r0,r0,#1 @ found
- bx lr
+ epilogue
60: @ We're here because the fast path found a hit - now we have to track down exactly which word it was
@ r0 points to the start of the double word after the one that was tested
@ r5 has the 00/ff pattern for the first word, r6 has the chained value
+ .cfi_restore_state @ Standard post-prologue state
+ .cfi_adjust_cfa_offset 16
+ .cfi_rel_offset 4, 0
+ .cfi_rel_offset 5, 4
+ .cfi_rel_offset 6, 8
+ .cfi_rel_offset 7, 12
cmp r5, #0
itte eq
moveq r5, r6 @ the end is in the 2nd word
@@ -126,7 +154,15 @@ __memchr_arm:
61:
pop {r4,r5,r6,r7}
+ .cfi_restore 7
+ .cfi_restore 6
+ .cfi_restore 5
+ .cfi_restore 4
+ .cfi_adjust_cfa_offset -16
subs r0,r0,#1
- bx lr
+ epilogue
+ .cfi_endproc
+ .cantunwind
+ .fnend
.size __memchr_arm, . - __memchr_arm
diff --git a/string/arm/memcpy.S b/string/arm/memcpy.S
index 86e6493..2423cfd 100644
--- a/string/arm/memcpy.S
+++ b/string/arm/memcpy.S
@@ -1,8 +1,8 @@
/*
* memcpy - copy memory area
*
- * Copyright (c) 2013-2020, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2013-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
/*
@@ -17,7 +17,7 @@
*/
-#include "../asmdefs.h"
+#include "asmdefs.h"
.syntax unified
/* This implementation requires ARM state. */
diff --git a/string/arm/memset.S b/string/arm/memset.S
index 11e9273..487b9d6 100644
--- a/string/arm/memset.S
+++ b/string/arm/memset.S
@@ -2,7 +2,7 @@
* memset - fill memory with a constant
*
* Copyright (c) 2010-2021, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
/*
diff --git a/string/arm/strcmp-armv6m.S b/string/arm/strcmp-armv6m.S
index b75d414..4d55306 100644
--- a/string/arm/strcmp-armv6m.S
+++ b/string/arm/strcmp-armv6m.S
@@ -1,10 +1,12 @@
/*
* strcmp for ARMv6-M (optimized for performance, not size)
*
- * Copyright (c) 2014-2020, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2014-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
+#include "asmdefs.h"
+
#if __ARM_ARCH == 6 && __ARM_ARCH_6M__ >= 1
.thumb_func
diff --git a/string/arm/strcmp.S b/string/arm/strcmp.S
index 51443e3..74b3d23 100644
--- a/string/arm/strcmp.S
+++ b/string/arm/strcmp.S
@@ -1,8 +1,8 @@
/*
* strcmp for ARMv7
*
- * Copyright (c) 2012-2021, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2012-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
#if __ARM_ARCH >= 7 && __ARM_ARCH_ISA_ARM >= 1
@@ -12,7 +12,7 @@
is sufficiently aligned. Use saturating arithmetic to optimize
the compares. */
-#include "../asmdefs.h"
+#include "asmdefs.h"
/* Build Options:
STRCMP_NO_PRECHECK: Don't run a quick pre-check of the first
@@ -26,6 +26,11 @@
#define STRCMP_NO_PRECHECK 0
+/* Ensure the .cantunwind directive is prepended to .fnend.
+ Leaf functions cannot throw exceptions - EHABI only supports
+ synchronous exceptions. */
+#define IS_LEAF
+
/* This version uses Thumb-2 code. */
.thumb
.syntax unified
@@ -98,8 +103,9 @@
ldrd r4, r5, [sp], #16
.cfi_restore 4
.cfi_restore 5
+ .cfi_adjust_cfa_offset -16
sub result, result, r1, lsr #24
- bx lr
+ epilogue push_ip=HAVE_PAC_LEAF
#else
/* To use the big-endian trick we'd have to reverse all three words.
that's slower than this approach. */
@@ -119,21 +125,15 @@
ldrd r4, r5, [sp], #16
.cfi_restore 4
.cfi_restore 5
+ .cfi_adjust_cfa_offset -16
sub result, result, r1
- bx lr
+ epilogue push_ip=HAVE_PAC_LEAF
#endif
.endm
- .p2align 5
-L(strcmp_start_addr):
-#if STRCMP_NO_PRECHECK == 0
-L(fastpath_exit):
- sub r0, r2, r3
- bx lr
- nop
-#endif
-ENTRY_ALIGN (__strcmp_arm, 0)
+ENTRY(__strcmp_arm)
+ prologue push_ip=HAVE_PAC_LEAF
#if STRCMP_NO_PRECHECK == 0
ldrb r2, [src1]
ldrb r3, [src2]
@@ -143,13 +143,13 @@ ENTRY_ALIGN (__strcmp_arm, 0)
bne L(fastpath_exit)
#endif
strd r4, r5, [sp, #-16]!
- .cfi_def_cfa_offset 16
- .cfi_offset 4, -16
- .cfi_offset 5, -12
+ .cfi_adjust_cfa_offset 16
+ .cfi_rel_offset 4, 0
+ .cfi_rel_offset 5, 4
orr tmp1, src1, src2
strd r6, r7, [sp, #8]
- .cfi_offset 6, -8
- .cfi_offset 7, -4
+ .cfi_rel_offset 6, 8
+ .cfi_rel_offset 7, 12
mvn const_m1, #0
lsl r2, tmp1, #29
cbz r2, L(loop_aligned8)
@@ -318,10 +318,19 @@ L(misaligned_exit):
mov result, tmp1
ldr r4, [sp], #16
.cfi_restore 4
- bx lr
+ .cfi_adjust_cfa_offset -16
+ epilogue push_ip=HAVE_PAC_LEAF
#if STRCMP_NO_PRECHECK == 0
+L(fastpath_exit):
+ .cfi_restore_state
+ .cfi_remember_state
+ sub r0, r2, r3
+ epilogue push_ip=HAVE_PAC_LEAF
+
L(aligned_m1):
+ .cfi_restore_state
+ .cfi_remember_state
add src2, src2, #4
#endif
L(src1_aligned):
@@ -368,9 +377,9 @@ L(overlap3):
/* R6/7 Not used in this sequence. */
.cfi_restore 6
.cfi_restore 7
+ .cfi_adjust_cfa_offset -16
neg result, result
- bx lr
-
+ epilogue push_ip=HAVE_PAC_LEAF
6:
.cfi_restore_state
S2LO data1, data1, #24
@@ -445,7 +454,8 @@ L(strcmp_done_equal):
/* R6/7 not used in this sequence. */
.cfi_restore 6
.cfi_restore 7
- bx lr
+ .cfi_adjust_cfa_offset -16
+ epilogue push_ip=HAVE_PAC_LEAF
L(strcmp_tail):
.cfi_restore_state
@@ -467,8 +477,9 @@ L(strcmp_tail):
/* R6/7 not used in this sequence. */
.cfi_restore 6
.cfi_restore 7
+ .cfi_adjust_cfa_offset -16
sub result, result, data2, lsr #24
- bx lr
+ epilogue push_ip=HAVE_PAC_LEAF
END (__strcmp_arm)
diff --git a/string/arm/strcpy.c b/string/arm/strcpy.c
index 02cf94f..b5728a2 100644
--- a/string/arm/strcpy.c
+++ b/string/arm/strcpy.c
@@ -2,7 +2,7 @@
* strcpy
*
* Copyright (c) 2008-2020, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
#if defined (__thumb2__) && !defined (__thumb__)
diff --git a/string/arm/strlen-armv6t2.S b/string/arm/strlen-armv6t2.S
index 5ad30c9..5eb8671 100644
--- a/string/arm/strlen-armv6t2.S
+++ b/string/arm/strlen-armv6t2.S
@@ -1,8 +1,8 @@
/*
* strlen - calculate the length of a string
*
- * Copyright (c) 2010-2020, Arm Limited.
- * SPDX-License-Identifier: MIT
+ * Copyright (c) 2010-2022, Arm Limited.
+ * SPDX-License-Identifier: MIT OR Apache-2.0 WITH LLVM-exception
*/
#if __ARM_ARCH >= 6 && __ARM_ARCH_ISA_THUMB == 2
@@ -13,7 +13,7 @@
*/
-#include "../asmdefs.h"
+#include "asmdefs.h"
#ifdef __ARMEB__
#define S2LO lsl
@@ -23,6 +23,11 @@
#define S2HI lsl
#endif
+/* Ensure the .cantunwind directive is prepended to .fnend.
+ Leaf functions cannot throw exceptions - EHABI only supports
+ synchronous exceptions. */
+#define IS_LEAF
+
/* This code requires Thumb. */
.thumb
.syntax unified
@@ -41,8 +46,8 @@
#define tmp2 r5
ENTRY (__strlen_armv6t2)
+ prologue 4 5 push_ip=HAVE_PAC_LEAF
pld [srcin, #0]
- strd r4, r5, [sp, #-8]!
bic src, srcin, #7
mvn const_m1, #0
ands tmp1, srcin, #7 /* (8 - bytes) to alignment. */
@@ -92,6 +97,7 @@ L(start_realigned):
beq L(loop_aligned)
L(null_found):
+ .cfi_remember_state
cmp data1a, #0
itt eq
addeq result, result, #4
@@ -100,11 +106,11 @@ L(null_found):
rev data1a, data1a
#endif
clz data1a, data1a
- ldrd r4, r5, [sp], #8
add result, result, data1a, lsr #3 /* Bits -> Bytes. */
- bx lr
+ epilogue 4 5 push_ip=HAVE_PAC_LEAF
L(misaligned8):
+ .cfi_restore_state
ldrd data1a, data1b, [src]
and tmp2, tmp1, #3
rsb result, tmp1, #0