From 274e2a1ef1513bb2960becf60854e6e4d574ff5c Mon Sep 17 00:00:00 2001
From: Koakuma
Date: Fri, 15 Apr 2022 05:59:55 +0700
Subject: [PATCH 1/5] compiler_rt: atomics: Add TAS lock support for SPARC

Some SPARC CPUs (particularly old and/or embedded ones) only have the
atomic TAS instruction (`ldstub`) available.
This adds support for emitting that instruction in the spinlock.
---
 lib/std/special/compiler_rt/atomics.zig | 30 ++++++++++++++++++++++---
 1 file changed, 27 insertions(+), 3 deletions(-)

diff --git a/lib/std/special/compiler_rt/atomics.zig b/lib/std/special/compiler_rt/atomics.zig
index 7727d7af3d..accec2e35a 100644
--- a/lib/std/special/compiler_rt/atomics.zig
+++ b/lib/std/special/compiler_rt/atomics.zig
@@ -24,6 +24,13 @@ const supports_atomic_ops = switch (arch) {
 // load/store atomically.
 // Objects bigger than this threshold require the use of a lock.
 const largest_atomic_size = switch (arch) {
+    // On SPARC systems that lack CAS and/or swap instructions, the only
+    // available atomic operation is a test-and-set (`ldstub`), so we force
+    // every atomic memory access to go through the lock.
+    // XXX: Check the presence of CAS/swap instructions and set this parameter
+    // accordingly.
+    .sparc, .sparcel, .sparcv9 => 0,
+
     // XXX: On x86/x86_64 we could check the presence of cmpxchg8b/cmpxchg16b
     // and set this parameter accordingly.
     else => @sizeOf(usize),
@@ -38,18 +45,35 @@ const SpinlockTable = struct {
     const Spinlock = struct {
         // Prevent false sharing by providing enough padding between two
         // consecutive spinlock elements
-        v: enum(usize) { Unlocked = 0, Locked } align(cache_line_size) = .Unlocked,
+        v: if (arch.isSPARC()) enum(u8) { Unlocked = 0, Locked = 255 } else enum(usize) { Unlocked = 0, Locked } align(cache_line_size) = .Unlocked,
 
         fn acquire(self: *@This()) void {
             while (true) {
-                switch (@atomicRmw(@TypeOf(self.v), &self.v, .Xchg, .Locked, .Acquire)) {
+                const flag = if (comptime arch.isSPARC())
+                    asm volatile ("ldstub [%[addr]], %[flag]"
+                        : [flag] "=r" (-> @TypeOf(self.v)),
+                        : [addr] "r" (&self.v),
+                        : "memory"
+                    )
+                else
+                    @atomicRmw(@TypeOf(self.v), &self.v, .Xchg, .Locked, .Acquire);
+
+                switch (flag) {
                     .Unlocked => break,
                     .Locked => {},
                 }
             }
         }
 
         fn release(self: *@This()) void {
-            @atomicStore(@TypeOf(self.v), &self.v, .Unlocked, .Release);
+            if (comptime arch.isSPARC()) {
+                _ = asm volatile ("clr [%[addr]]"
+                    :
+                    : [addr] "r" (&self.v),
+                    : "memory"
+                );
+            } else {
+                @atomicStore(@TypeOf(self.v), &self.v, .Unlocked, .Release);
+            }
         }
     };
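
Taken on its own, patch 1 turns the spinlock into a classic test-and-set lock on SPARC: `ldstub` atomically reads a byte and writes 0xFF into it, so a returned 0 means the caller just took a free lock, and storing a zero byte releases it. The standalone sketch below illustrates that scheme outside compiler_rt. It is illustrative only: the names `lock_byte`, `spinLock`, and `spinUnlock` are made up for the example, it only builds when targeting SPARC, the release already uses the byte-wide `clrb` that the series settles on in patch 5, and the read-only inner loop is an optional test-and-test-and-set refinement that the patch itself does not use (the patch simply retries `ldstub`).

// Minimal ldstub-based test-and-set lock (illustrative only, SPARC targets).
var lock_byte: u8 = 0; // 0 = unlocked, 255 = held (the value ldstub stores)

fn spinLock() void {
    while (true) {
        // Atomically fetch the old value and store 255 into the lock byte.
        const old = asm volatile ("ldstub [%[addr]], %[old]"
            : [old] "=r" (-> u8),
            : [addr] "r" (&lock_byte),
            : "memory"
        );
        if (old == 0) return; // it was free, and we now hold it

        // Optional refinement: spin on plain loads until the byte looks free,
        // then retry the atomic test-and-set, to reduce bus traffic.
        while (@atomicLoad(u8, &lock_byte, .Monotonic) != 0) {}
    }
}

fn spinUnlock() void {
    // A single zero-byte store releases the lock; clrb does exactly that.
    _ = asm volatile ("clrb [%[addr]]"
        :
        : [addr] "r" (&lock_byte),
        : "memory"
    );
}
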
From 6aa89115f974b639d6006d8106a7df7d94f636ac Mon Sep 17 00:00:00 2001
From: Koakuma
Date: Fri, 15 Apr 2022 19:31:55 +0700
Subject: [PATCH 2/5] compiler_rt: atomics: Split long lines and add comment on constants

---
 lib/std/special/compiler_rt/atomics.zig | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)

diff --git a/lib/std/special/compiler_rt/atomics.zig b/lib/std/special/compiler_rt/atomics.zig
index accec2e35a..aa045a2ab9 100644
--- a/lib/std/special/compiler_rt/atomics.zig
+++ b/lib/std/special/compiler_rt/atomics.zig
@@ -43,9 +43,15 @@ const SpinlockTable = struct {
     const max_spinlocks = 64;
 
     const Spinlock = struct {
+        // The SPARC ldstub instruction will write a 255 into the memory location.
+        // We'll use that as a sign that the lock is currently held.
+        // See also: Section B.7 in the SPARCv8 spec & A.29 in the SPARCv9 spec.
+        const sparc_lock: type = enum(u8) { Unlocked = 0, Locked = 255 };
+        const other_lock: type = enum(usize) { Unlocked = 0, Locked };
+
         // Prevent false sharing by providing enough padding between two
         // consecutive spinlock elements
-        v: if (arch.isSPARC()) enum(u8) { Unlocked = 0, Locked = 255 } else enum(usize) { Unlocked = 0, Locked } align(cache_line_size) = .Unlocked,
+        v: if (arch.isSPARC()) sparc_lock else other_lock align(cache_line_size) = .Unlocked,
 
         fn acquire(self: *@This()) void {
             while (true) {

From 5b283fba77d50b48d9b9895fac36ee3e1844bd1c Mon Sep 17 00:00:00 2001
From: Koakuma
Date: Fri, 15 Apr 2022 19:40:36 +0700
Subject: [PATCH 3/5] compiler_rt: atomics: Add Leon CAS instruction check for SPARC atomics

---
 lib/std/special/compiler_rt/atomics.zig | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/lib/std/special/compiler_rt/atomics.zig b/lib/std/special/compiler_rt/atomics.zig
index aa045a2ab9..07b15c3102 100644
--- a/lib/std/special/compiler_rt/atomics.zig
+++ b/lib/std/special/compiler_rt/atomics.zig
@@ -1,6 +1,7 @@
 const std = @import("std");
 const builtin = @import("builtin");
-const arch = builtin.cpu.arch;
+const cpu = builtin.cpu;
+const arch = cpu.arch;
 
 const linkage: std.builtin.GlobalLinkage = if (builtin.is_test) .Internal else .Weak;
 
@@ -27,9 +28,7 @@ const largest_atomic_size = switch (arch) {
     // On SPARC systems that lack CAS and/or swap instructions, the only
     // available atomic operation is a test-and-set (`ldstub`), so we force
     // every atomic memory access to go through the lock.
-    // XXX: Check the presence of CAS/swap instructions and set this parameter
-    // accordingly.
-    .sparc, .sparcel, .sparcv9 => 0,
+    .sparc, .sparcel => if (std.Target.sparc.featureSetHas(cpu.features, .hasleoncasa)) @sizeOf(usize) else 0,
 
     // XXX: On x86/x86_64 we could check the presence of cmpxchg8b/cmpxchg16b
     // and set this parameter accordingly.
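
Patch 3 makes the fallback decision a compile-time property of the target CPU: 32-bit SPARC only keeps lock-free word-sized atomics when the LEON `casa` instruction is advertised by the CPU features, otherwise `largest_atomic_size` drops to 0 and every object goes through the spinlock. The snippet below is a rough, self-contained sketch of that selection using the generated `std.Target.sparc.featureSetHas` helper; it mirrors the patch's switch for illustration only and is not the compiler_rt source, and the CPU names in the comment are examples rather than an exhaustive list.

const std = @import("std");
const builtin = @import("builtin");

// Resolved entirely at compile time from the target CPU model, e.g.
// `-mcpu=leon4` advertises `casa`, while a plain `-mcpu=v8` does not.
const largest_atomic_size = switch (builtin.cpu.arch) {
    // 32-bit SPARC: lock-free word-sized atomics only with the LEON CASA
    // instruction; otherwise force everything through the spinlock (0 bytes).
    .sparc, .sparcel => if (std.Target.sparc.featureSetHas(builtin.cpu.features, .hasleoncasa))
        @sizeOf(usize)
    else
        0,
    // Other architectures: assume pointer-sized objects are lock-free.
    else => @sizeOf(usize),
};

pub fn main() void {
    std.debug.print("lock-free atomics up to {d} bytes\n", .{largest_atomic_size});
}
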
From fac2a2e7545db8d814baf2f011acee24bf9f95ee Mon Sep 17 00:00:00 2001
From: Koakuma
Date: Fri, 15 Apr 2022 19:44:46 +0700
Subject: [PATCH 4/5] compiler_rt: atomics: Formatting change for flag definition

---
 lib/std/special/compiler_rt/atomics.zig | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/lib/std/special/compiler_rt/atomics.zig b/lib/std/special/compiler_rt/atomics.zig
index 07b15c3102..a60fcbbecd 100644
--- a/lib/std/special/compiler_rt/atomics.zig
+++ b/lib/std/special/compiler_rt/atomics.zig
@@ -54,14 +54,15 @@ const SpinlockTable = struct {
 
         fn acquire(self: *@This()) void {
             while (true) {
-                const flag = if (comptime arch.isSPARC())
-                    asm volatile ("ldstub [%[addr]], %[flag]"
+                const flag = if (comptime arch.isSPARC()) flag: {
+                    break :flag asm volatile ("ldstub [%[addr]], %[flag]"
                         : [flag] "=r" (-> @TypeOf(self.v)),
                         : [addr] "r" (&self.v),
                         : "memory"
-                    )
-                else
-                    @atomicRmw(@TypeOf(self.v), &self.v, .Xchg, .Locked, .Acquire);
+                    );
+                } else flag: {
+                    break :flag @atomicRmw(@TypeOf(self.v), &self.v, .Xchg, .Locked, .Acquire);
+                };
 
                 switch (flag) {
                     .Unlocked => break,

From 33956b8e558b1c422dab8f91643c0a9caae84501 Mon Sep 17 00:00:00 2001
From: Koakuma
Date: Fri, 15 Apr 2022 19:48:25 +0700
Subject: [PATCH 5/5] compiler_rt: atomics: clr -> clrb

---
 lib/std/special/compiler_rt/atomics.zig | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/std/special/compiler_rt/atomics.zig b/lib/std/special/compiler_rt/atomics.zig
index a60fcbbecd..20545d0791 100644
--- a/lib/std/special/compiler_rt/atomics.zig
+++ b/lib/std/special/compiler_rt/atomics.zig
@@ -72,7 +72,7 @@ const SpinlockTable = struct {
         }
 
         fn release(self: *@This()) void {
             if (comptime arch.isSPARC()) {
-                _ = asm volatile ("clr [%[addr]]"
+                _ = asm volatile ("clrb [%[addr]]"
                     :
                     : [addr] "r" (&self.v),
                     : "memory"