author    Carter Sande <carter.sande@duodecima.technology>  2019-06-24 22:32:50 -0700
committer Carter Sande <carter.sande@duodecima.technology>  2019-06-24 22:32:50 -0700
commit    cd02630da82b6b0a59583160b12c286a7aec7f8c (patch)
tree      f3bfbcfc37aac71290e98e364361e9454134c271 /std
parent    de2b0cd722ca8fe98d16c86825db4cb2a70931c6 (diff)
compiler-rt: Support Thumb versions older than ARMv6
Add versions of __aeabi_memset and __aeabi_memclr that avoid mov instructions between two low registers, since that encoding does not exist on thumbv4t and thumbv5.
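For reference, a minimal Zig sketch (illustrative only, not part of this change) of what the shims compute: the AEABI helpers pass the byte count as the second argument, while ISO memset takes the fill value there, so the assembly has to reorder arguments before calling memset. The function names below are hypothetical.

    // Hypothetical reference semantics for the naked asm shims.
    fn aeabiMemsetRef(dest: [*]u8, n: usize, c: u8) void {
        // __aeabi_memset(dest, n, c) behaves like memset(dest, c, n).
        var i: usize = 0;
        while (i < n) : (i += 1) dest[i] = c;
    }

    fn aeabiMemclrRef(dest: [*]u8, n: usize) void {
        // __aeabi_memclr(dest, n) behaves like memset(dest, 0, n).
        aeabiMemsetRef(dest, n, 0);
    }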
Diffstat (limited to 'std')
 std/special/compiler_rt.zig | 43 +++++++++++++++++++++++++++++++++++++---
 1 file changed, 41 insertions(+), 2 deletions(-)
diff --git a/std/special/compiler_rt.zig b/std/special/compiler_rt.zig
index 46607a3adf..95867d6952 100644
--- a/std/special/compiler_rt.zig
+++ b/std/special/compiler_rt.zig
@@ -471,6 +471,30 @@ test "usesThumb1" {
//etc.
}
+const use_thumb_1_pre_armv6 = usesThumb1PreArmv6(builtin.arch);
+
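+// A mov between two low registers has no Thumb-1 encoding before ARMv6,
+// so these targets get the alternate assembly sequences below.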
+fn usesThumb1PreArmv6(arch: builtin.Arch) bool {
+ return switch (arch) {
+ .thumb => switch (arch.thumb) {
+ .v5,
+ .v5te,
+ .v4t,
+ => true,
+ else => false,
+ },
+ .thumbeb => switch (arch.thumbeb) {
+ .v5,
+ .v5te,
+ .v4t,
+ => true,
+ else => false,
+ },
+ else => false,
+ };
+}
+
nakedcc fn __aeabi_memcpy() noreturn {
@setRuntimeSafety(false);
if (use_thumb_1) {
@@ -505,7 +529,19 @@ nakedcc fn __aeabi_memmove() noreturn {
nakedcc fn __aeabi_memset() noreturn {
@setRuntimeSafety(false);
- if (use_thumb_1) {
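+ // thumbv4t/v5: swap r1 (byte count) and r2 (fill value) into memset's
+ // (dest, c, n) argument order using flag-setting eors, since a
+ // low-to-low mov is not encodable here.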
+ if (use_thumb_1_pre_armv6) {
+ asm volatile (
+ \\ eors r1, r2
+ \\ eors r2, r1
+ \\ eors r1, r2
+ \\ push {r7, lr}
+ \\ bl memset
+ \\ pop {r7, pc}
+ );
+ } else if (use_thumb_1) {
asm volatile (
\\ mov r3, r1
\\ mov r1, r2
@@ -527,7 +563,17 @@ nakedcc fn __aeabi_memset() noreturn {
nakedcc fn __aeabi_memclr() noreturn {
@setRuntimeSafety(false);
- if (use_thumb_1) {
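+ // thumbv4t/v5: memclr(dest, n) is memset(dest, 0, n); copy n into r2
+ // with a flag-setting adds instead of mov, then zero r1.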
+ if (use_thumb_1_pre_armv6) {
+ asm volatile (
+ \\ adds r2, r1, #0
+ \\ movs r1, #0
+ \\ push {r7, lr}
+ \\ bl memset
+ \\ pop {r7, pc}
+ );
+ } else if (use_thumb_1) {
asm volatile (
\\ mov r2, r1
\\ movs r1, #0