heapless/pool/treiber/llsc.rs

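//! Treiber stack primitives implemented with load-link/store-conditional
//! (ARM `ldrex` / `strex`) instructions. A store-conditional only succeeds
//! while the exclusive reservation opened by the paired load-link is still
//! intact, so the retry loops in `push` and `try_pop` are immune to the ABA
//! problem without needing a compare-and-swap or a tag counter.
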
use core::{
    cell::UnsafeCell,
    ptr::{self, NonNull},
};

use super::{Node, Stack};

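/// Nullable pointer used for the stack's `top` and for each node's `next`
/// link. Synchronization is provided by the `ldrex`/`strex` loops in `push`
/// and `try_pop`, not by this type itself, so a plain `UnsafeCell` suffices
/// instead of a true atomic type.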
pub struct AtomicPtr<N>
where
    N: Node,
{
    inner: UnsafeCell<Option<NonNull<N>>>,
}

impl<N> AtomicPtr<N>
where
    N: Node,
{
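    /// The null pointer, i.e. the empty-stack / end-of-list value.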
    pub const fn null() -> Self {
        Self {
            inner: UnsafeCell::new(None),
        }
    }
}

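/// Known-non-null pointer to a node: the handle that `push` consumes and
/// `try_pop` hands back to the caller.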
pub struct NonNullPtr<N>
where
    N: Node,
{
    inner: NonNull<N>,
}

impl<N> NonNullPtr<N>
where
    N: Node,
{
    pub fn as_ptr(&self) -> *mut N {
        self.inner.as_ptr().cast()
    }

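    /// Creates a node pointer from a `&'static mut` reference; the exclusive
    /// static borrow guarantees that the pointer is non-null and that the
    /// caller is the sole owner of the node.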
    pub fn from_static_mut_ref(ref_: &'static mut N) -> Self {
        Self {
            inner: NonNull::from(ref_),
        }
    }
}

impl<N> Clone for NonNullPtr<N>
where
    N: Node,
{
    fn clone(&self) -> Self {
        Self { inner: self.inner }
    }
}

impl<N> Copy for NonNullPtr<N> where N: Node {}

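/// Pushes `node` onto `stack`.
///
/// # Safety
///
/// The caller must own `node` exclusively: it must not already be linked into
/// this (or any other) stack, and it must remain valid for as long as it is
/// reachable through `stack`.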
pub unsafe fn push<N>(stack: &Stack<N>, mut node: NonNullPtr<N>)
where
    N: Node,
{
    // `stack.top` is an `AtomicPtr`, i.e. a pointer-sized cell; treat it as a
    // raw `usize` so it can be fed to the `ldrex`/`strex` wrappers below.
    let top_addr = ptr::addr_of!(stack.top) as *mut usize;

    loop {
        // Load-link the current top of the stack, opening an exclusive
        // reservation on its address.
        let top = arch::load_link(top_addr);

        // Link the new node in front of the current top. The caller owns the
        // node exclusively at this point, so a plain write to its `next`
        // field is fine.
        node.inner
            .as_mut()
            .next_mut()
            .inner
            .get()
            .write(NonNull::new(top as *mut _));

        // Publish the node as the new top. The store-conditional fails if the
        // reservation was lost (another core pushed or popped first); retry.
        if arch::store_conditional(node.inner.as_ptr() as usize, top_addr).is_ok() {
            break;
        }
    }
}

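/// Pops the most recently pushed node off `stack`, returning `None` if the
/// stack is empty.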
pub fn try_pop<N>(stack: &Stack<N>) -> Option<NonNullPtr<N>>
where
    N: Node,
{
    unsafe {
        let top_addr = ptr::addr_of!(stack.top) as *mut usize;

        loop {
            // Load-link the current top of the stack.
            let top = arch::load_link(top_addr);

            if let Some(top) = NonNull::new(top as *mut N) {
                let next = &top.as_ref().next();

                // Attempt to replace the top with its `next` pointer (zero,
                // i.e. null, if the popped node was the last one). The
                // store-conditional fails if the reservation opened by the
                // load-link was lost in the meantime, in which case we retry.
                if arch::store_conditional(
                    next.inner
                        .get()
                        .read()
                        .map(|non_null| non_null.as_ptr() as usize)
                        .unwrap_or_default(),
                    top_addr,
                )
                .is_ok()
                {
                    break Some(NonNullPtr { inner: top });
                }
            } else {
                // The stack is empty: drop the open reservation and bail out.
                arch::clear_load_link();

                break None;
            }
        }
    }
}

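/// Thin wrappers around the ARM exclusive-access instructions (`clrex`,
/// `ldrex` and `strex`).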
#[cfg(arm_llsc)]
mod arch {
    use core::arch::asm;

    #[inline(always)]
    pub fn clear_load_link() {
        unsafe { asm!("clrex", options(nomem, nostack)) }
    }

    /// # Safety
    /// - `addr` must be a valid pointer
    #[inline(always)]
    pub unsafe fn load_link(addr: *const usize) -> usize {
        let value;
        asm!("ldrex {}, [{}]", out(reg) value, in(reg) addr, options(nostack));
        value
    }

    /// # Safety
    /// - `addr` must be a valid pointer
    #[inline(always)]
    pub unsafe fn store_conditional(value: usize, addr: *mut usize) -> Result<(), ()> {
        let outcome: usize;
        asm!("strex {}, {}, [{}]", out(reg) outcome, in(reg) value, in(reg) addr, options(nostack));
        if outcome == 0 {
            Ok(())
        } else {
            Err(())
        }
    }
}