aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorWilco Dijkstra <wilco.dijkstra@arm.com>2021-01-08 16:32:25 +0000
committerSzabolcs Nagy <szabolcs.nagy@arm.com>2021-01-08 16:32:25 +0000
commitb7e368fb86d602bb5578450ec2c078f2a876ea71 (patch)
tree530094a1aafd53a9e0e8ebb71e671c0143cdca9e
parentd5b36a8a2885c4dea103561059cd3973bef56506 (diff)
downloadarm-optimized-routines-b7e368fb86d602bb5578450ec2c078f2a876ea71.tar.gz
string: Assembly code cleanup
Clean up spurious .text and .arch directives. Use ENTRY rather than ENTRY_ALIGN.
-rw-r--r--string/aarch64/memchr-sve.S5
-rw-r--r--string/aarch64/memcmp-sve.S5
-rw-r--r--string/aarch64/strchr-sve.S5
-rw-r--r--string/aarch64/strcmp-sve.S5
-rw-r--r--string/aarch64/strcpy-sve.S5
-rw-r--r--string/aarch64/strlen-sve.S5
-rw-r--r--string/aarch64/strncmp-mte.S1
-rw-r--r--string/aarch64/strncmp-sve.S5
-rw-r--r--string/aarch64/strncmp.S1
-rw-r--r--string/aarch64/strnlen-sve.S5
-rw-r--r--string/aarch64/strrchr-sve.S5
-rw-r--r--string/arm/memchr.S1
-rw-r--r--string/arm/memset.S1
-rw-r--r--string/arm/strcmp.S1
14 files changed, 9 insertions, 41 deletions
diff --git a/string/aarch64/memchr-sve.S b/string/aarch64/memchr-sve.S
index 816cb98..8fe48b3 100644
--- a/string/aarch64/memchr-sve.S
+++ b/string/aarch64/memchr-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN(__memchr_aarch64_sve, 4)
+ENTRY (__memchr_aarch64_sve)
PTR_ARG (0)
SIZE_ARG (2)
dup z1.b, w1 /* duplicate c to a vector */
diff --git a/string/aarch64/memcmp-sve.S b/string/aarch64/memcmp-sve.S
index b6b2ae2..6445a80 100644
--- a/string/aarch64/memcmp-sve.S
+++ b/string/aarch64/memcmp-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__memcmp_aarch64_sve, 4)
+ENTRY (__memcmp_aarch64_sve)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
diff --git a/string/aarch64/strchr-sve.S b/string/aarch64/strchr-sve.S
index ce23282..1f051da 100644
--- a/string/aarch64/strchr-sve.S
+++ b/string/aarch64/strchr-sve.S
@@ -14,9 +14,6 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
/* To build as strchrnul, define BUILD_STRCHRNUL before compiling this file. */
#ifdef BUILD_STRCHRNUL
#define FUNC __strchrnul_aarch64_sve
@@ -24,7 +21,7 @@
#define FUNC __strchr_aarch64_sve
#endif
-ENTRY_ALIGN (FUNC, 4)
+ENTRY (FUNC)
PTR_ARG (0)
dup z1.b, w1 /* replicate byte across vector */
setffr /* initialize FFR */
diff --git a/string/aarch64/strcmp-sve.S b/string/aarch64/strcmp-sve.S
index 27ee539..593e254 100644
--- a/string/aarch64/strcmp-sve.S
+++ b/string/aarch64/strcmp-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__strcmp_aarch64_sve, 4)
+ENTRY (__strcmp_aarch64_sve)
PTR_ARG (0)
PTR_ARG (1)
setffr /* initialize FFR */
diff --git a/string/aarch64/strcpy-sve.S b/string/aarch64/strcpy-sve.S
index 550132b..30e9eca 100644
--- a/string/aarch64/strcpy-sve.S
+++ b/string/aarch64/strcpy-sve.S
@@ -14,9 +14,6 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
/* To build as stpcpy, define BUILD_STPCPY before compiling this file. */
#ifdef BUILD_STPCPY
#define FUNC __stpcpy_aarch64_sve
@@ -24,7 +21,7 @@
#define FUNC __strcpy_aarch64_sve
#endif
-ENTRY_ALIGN (FUNC, 4)
+ENTRY (FUNC)
PTR_ARG (0)
PTR_ARG (1)
setffr /* initialize FFR */
diff --git a/string/aarch64/strlen-sve.S b/string/aarch64/strlen-sve.S
index 93a8f14..ae96fbb 100644
--- a/string/aarch64/strlen-sve.S
+++ b/string/aarch64/strlen-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__strlen_aarch64_sve, 4)
+ENTRY (__strlen_aarch64_sve)
PTR_ARG (0)
setffr /* initialize FFR */
ptrue p2.b /* all ones; loop invariant */
diff --git a/string/aarch64/strncmp-mte.S b/string/aarch64/strncmp-mte.S
index c6dbe0b..d296724 100644
--- a/string/aarch64/strncmp-mte.S
+++ b/string/aarch64/strncmp-mte.S
@@ -53,7 +53,6 @@
#define LS_BK lsl
#endif
- .text
ENTRY (__strncmp_aarch64_mte)
PTR_ARG (0)
PTR_ARG (1)
diff --git a/string/aarch64/strncmp-sve.S b/string/aarch64/strncmp-sve.S
index 663d93f..80e25eb 100644
--- a/string/aarch64/strncmp-sve.S
+++ b/string/aarch64/strncmp-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__strncmp_aarch64_sve, 4)
+ENTRY (__strncmp_aarch64_sve)
PTR_ARG (0)
PTR_ARG (1)
SIZE_ARG (2)
diff --git a/string/aarch64/strncmp.S b/string/aarch64/strncmp.S
index 52f2396..d3a2b81 100644
--- a/string/aarch64/strncmp.S
+++ b/string/aarch64/strncmp.S
@@ -40,7 +40,6 @@
#define endloop x15
#define count mask
- .text
ENTRY (__strncmp_aarch64)
PTR_ARG (0)
PTR_ARG (1)
diff --git a/string/aarch64/strnlen-sve.S b/string/aarch64/strnlen-sve.S
index 4ebcc55..aec9df2 100644
--- a/string/aarch64/strnlen-sve.S
+++ b/string/aarch64/strnlen-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__strnlen_aarch64_sve, 4)
+ENTRY (__strnlen_aarch64_sve)
PTR_ARG (0)
SIZE_ARG (1)
setffr /* initialize FFR */
diff --git a/string/aarch64/strrchr-sve.S b/string/aarch64/strrchr-sve.S
index 6e3f352..7640b38 100644
--- a/string/aarch64/strrchr-sve.S
+++ b/string/aarch64/strrchr-sve.S
@@ -14,10 +14,7 @@
* SVE Available.
*/
- .arch armv8-a+sve
- .text
-
-ENTRY_ALIGN (__strrchr_aarch64_sve, 4)
+ENTRY (__strrchr_aarch64_sve)
PTR_ARG (0)
dup z1.b, w1 /* replicate byte across vector */
setffr /* initialize FFR */
diff --git a/string/arm/memchr.S b/string/arm/memchr.S
index 2eff4d1..b3ceb4f 100644
--- a/string/arm/memchr.S
+++ b/string/arm/memchr.S
@@ -31,7 +31,6 @@
#else
#define CHARTSTMASK(c) 1<<(c*8)
#endif
- .text
.thumb
@ ---------------------------------------------------------------------------
diff --git a/string/arm/memset.S b/string/arm/memset.S
index 3ee5238..0a60ae0 100644
--- a/string/arm/memset.S
+++ b/string/arm/memset.S
@@ -25,7 +25,6 @@
#else
#define CHARTSTMASK(c) 1<<(c*8)
#endif
- .text
.thumb
@ ---------------------------------------------------------------------------
diff --git a/string/arm/strcmp.S b/string/arm/strcmp.S
index f939da2..171e279 100644
--- a/string/arm/strcmp.S
+++ b/string/arm/strcmp.S
@@ -125,7 +125,6 @@
#endif
.endm
- .text
.p2align 5
L(strcmp_start_addr):
#if STRCMP_NO_PRECHECK == 0