atomic_utils.h
1/*
2 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
3 *
4 * This file is subject to the terms and conditions of the GNU Lesser General
5 * Public License v2.1. See the file LICENSE in the top level directory for more
6 * details.
7 */
8
9#pragma once
10
138#include <stdint.h>
139
140#include "irq.h"
141#include "macros/utils.h"
142#include "sched.h"
143
144#include "atomic_utils_arch.h" /* IWYU pragma: export */
145
146#ifdef __cplusplus
147extern "C" {
148#endif
149
150/* NOLINTBEGIN(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name)
151 *
152 * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
153 * to pass the type with parentheses. Suppressing the clang-tidy warning here,
154 * as adding parentheses around a type would be a syntax error.
155 *
156 * The macro ATOMIC_FETCH_OP_IMPL() uses `val` as the argument name, but the
157 * declarations may use a more specific name (e.g. summand instead of val).
158 */
159
160/* Declarations and documentation: */
161
162#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
182typedef struct {
183 volatile uint8_t *dest;
184 uint8_t mask;
185} atomic_bit_u8_t;
186
192typedef struct {
193 volatile uint16_t *dest;
194 uint16_t mask;
195} atomic_bit_u16_t;
196
202typedef struct {
203 volatile uint32_t *dest;
204 uint32_t mask;
205} atomic_bit_u32_t;
206
212typedef struct {
213 volatile uint64_t *dest;
214 uint64_t mask;
215} atomic_bit_u64_t;
216
217#endif /* HAS_ATOMIC_BIT */
218
229static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
236static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
243static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
250static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
261static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
262{
263 if (sizeof(uint64_t) == sizeof(unsigned)) {
264 return atomic_load_u64((volatile void *)var);
265 }
266
267 if (sizeof(uint32_t) == sizeof(unsigned)) {
268 return atomic_load_u32((volatile void *)var);
269 }
270
271 return atomic_load_u16((volatile void *)var);
272}
273
280static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var) {
281 if (sizeof(uintptr_t) == 2) {
282 return atomic_load_u16((const volatile uint16_t *)var);
283 }
284
285 if (sizeof(uintptr_t) == 4) {
286 return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
287 }
288
289 return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
290}
297static inline void * atomic_load_ptr(void **ptr_addr) {
298 return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
299}
306static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
307{
308 return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
309}
321static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
327static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
333static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
339static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);
349static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
350{
351 if (sizeof(uint64_t) == sizeof(unsigned)) {
352 atomic_store_u64((volatile void *)dest, val);
353 }
354 else if (sizeof(uint32_t) == sizeof(unsigned)) {
355 atomic_store_u32((volatile void *)dest, val);
356 }
357 else {
358 atomic_store_u16((volatile void *)dest, val);
359 }
360}
361
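/* Usage sketch, assuming a hypothetical `rx_count` counter shared between an
 * ISR and thread context: the _unsigned helpers give tear-free loads and
 * stores regardless of the width of `unsigned` on the platform. A separate
 * load followed by a store is not atomic as a whole, though; use
 * atomic_fetch_add_unsigned() and friends when the full read-modify-write
 * must be atomic.
 *
 *     static volatile unsigned rx_count;
 *
 *     void rx_isr(void)
 *     {
 *         atomic_store_unsigned(&rx_count, atomic_load_unsigned(&rx_count) + 1);
 *     }
 *
 *     unsigned rx_snapshot(void)
 *     {
 *         return atomic_load_unsigned(&rx_count);
 *     }
 */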
368static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
369{
370 if (sizeof(uintptr_t) == 2) {
371 atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
372 }
373 else if (sizeof(uintptr_t) == 4) {
374 atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
375 }
376 else {
377 atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
378 }
379}
386static inline void atomic_store_ptr(void **dest, const void *val) {
387 atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
388}
395static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
396 kernel_pid_t val)
397{
398 atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
399}
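/* Usage sketch, assuming hypothetical names: one context publishes a buffer
 * pointer, another picks it up. Only the pointer value itself is accessed
 * atomically; the pointed-to data still needs its own synchronization.
 *
 *     static void *rx_buf;
 *
 *     void publish(void *fresh)
 *     {
 *         atomic_store_ptr(&rx_buf, fresh);
 *     }
 *
 *     void *acquire(void)
 *     {
 *         return atomic_load_ptr(&rx_buf);
 *     }
 */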
412static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
413 uint8_t summand);
420static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
421 uint16_t summand);
428static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
429 uint32_t summand);
436static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
437 uint64_t summand);
448static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
449 unsigned summand)
450{
451 if (sizeof(unsigned) == sizeof(uint64_t)) {
452 return atomic_fetch_add_u64((volatile void *)dest, summand);
453 }
454
455 if (sizeof(unsigned) == sizeof(uint32_t)) {
456 return atomic_fetch_add_u32((volatile void *)dest, summand);
457 }
458
459 return atomic_fetch_add_u16((volatile void *)dest, summand);
460}
474static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
475 uint8_t subtrahend);
483static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
484 uint16_t subtrahend);
492static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
493 uint32_t subtrahend);
501static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
502 uint64_t subtrahend);
514static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
515 unsigned subtrahend)
516{
517 if (sizeof(unsigned) == sizeof(uint64_t)) {
518 return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
519 }
520
521 if (sizeof(unsigned) == sizeof(uint32_t)) {
522 return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
523 }
524
525 return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
526}
540static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
548static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
549 uint16_t val);
557static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
558 uint32_t val);
566static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
567 uint64_t val);
579static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
580 unsigned val)
581{
582 if (sizeof(unsigned) == sizeof(uint64_t)) {
583 return atomic_fetch_or_u64((volatile void *)dest, val);
584 }
585
586 if (sizeof(unsigned) == sizeof(uint32_t)) {
587 return atomic_fetch_or_u32((volatile void *)dest, val);
588 }
589
590 return atomic_fetch_or_u16((volatile void *)dest, val);
591}
605static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
613static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
614 uint16_t val);
622static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
623 uint32_t val);
631static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
632 uint64_t val);
644static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
645 unsigned val)
646{
647 if (sizeof(unsigned) == sizeof(uint64_t)) {
648 return atomic_fetch_xor_u64((volatile void *)dest, val);
649 }
650
651 if (sizeof(unsigned) == sizeof(uint32_t)) {
652 return atomic_fetch_xor_u32((volatile void *)dest, val);
653 }
654
655 return atomic_fetch_xor_u16((volatile void *)dest, val);
656}
670static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
678static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
679 uint16_t val);
687static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
688 uint32_t val);
696static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
697 uint64_t val);
709static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
710 unsigned val)
711{
712 if (sizeof(unsigned) == sizeof(uint64_t)) {
713 return atomic_fetch_and_u64((volatile void *)dest, val);
714 }
715
716 if (sizeof(unsigned) == sizeof(uint32_t)) {
717 return atomic_fetch_and_u32((volatile void *)dest, val);
718 }
719
720 return atomic_fetch_and_u16((volatile void *)dest, val);
721}
733static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
734 uint8_t bit);
735
741static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
742 uint8_t bit);
743
749static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
750 uint8_t bit);
751
757static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
758 uint8_t bit);
769static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
774static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
779static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
784static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
795static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
800static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
805static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
810static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
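/* Usage sketch, assuming a hypothetical `flags` word: a bit reference can be
 * computed once (e.g. during initialization) and reused later, which may let
 * platforms with dedicated bit-manipulation hardware resolve the address up
 * front.
 *
 *     static volatile uint32_t flags;
 *     static atomic_bit_u32_t ready_bit;
 *
 *     void init(void)
 *     {
 *         ready_bit = atomic_bit_u32(&flags, 3);
 *     }
 *
 *     void mark_ready(void) { atomic_set_bit_u32(ready_bit); }
 *     void mark_busy(void)  { atomic_clear_bit_u32(ready_bit); }
 */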
824static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
825 uint8_t summand);
833static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
834 uint16_t summand);
842static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
843 uint32_t summand);
851static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
852 uint64_t summand);
864static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
865 unsigned summand)
866{
867 if (sizeof(unsigned) == sizeof(uint64_t)) {
868 return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
869 }
870
871 if (sizeof(unsigned) == sizeof(uint32_t)) {
872 return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
873 }
874
875 return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
876}
890static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
891 uint8_t subtrahend);
899static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
900 uint16_t subtrahend);
908static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
909 uint32_t subtrahend);
917static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
918 uint64_t subtrahend);
930static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
931 unsigned subtrahend)
932{
933 if (sizeof(unsigned) == sizeof(uint64_t)) {
934 return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
935 }
936
937 if (sizeof(unsigned) == sizeof(uint32_t)) {
938 return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
939 }
940
941 return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
942}
956static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
964static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
965 uint16_t val);
973static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
974 uint32_t val);
982static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
983 uint64_t val);
995static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
996 unsigned val)
997{
998 if (sizeof(unsigned) == sizeof(uint64_t)) {
999 return semi_atomic_fetch_or_u64((volatile void *)dest, val);
1000 }
1001
1002 if (sizeof(unsigned) == sizeof(uint32_t)) {
1003 return semi_atomic_fetch_or_u32((volatile void *)dest, val);
1004 }
1005
1006 return semi_atomic_fetch_or_u16((volatile void *)dest, val);
1007}
1021static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1022 uint8_t val);
1030static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1031 uint16_t val);
1039static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1040 uint32_t val);
1048static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1049 uint64_t val);
1061static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
1062 unsigned val)
1063{
1064 if (sizeof(unsigned) == sizeof(uint64_t)) {
1065 return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
1066 }
1067
1068 if (sizeof(unsigned) == sizeof(uint32_t)) {
1069 return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
1070 }
1071
1072 return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
1073}
1087static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1088 uint8_t val);
1096static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1097 uint16_t val);
1105static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1106 uint32_t val);
1114static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1115 uint64_t val);
1127static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
1128 unsigned val)
1129{
1130 if (sizeof(unsigned) == sizeof(uint64_t)) {
1131 return semi_atomic_fetch_and_u64((volatile void *)dest, val);
1132 }
1133
1134 if (sizeof(unsigned) == sizeof(uint32_t)) {
1135 return semi_atomic_fetch_and_u32((volatile void *)dest, val);
1136 }
1137
1138 return semi_atomic_fetch_and_u16((volatile void *)dest, val);
1139}
1142/* Fallback implementations of atomic utility functions: */
1143
1151#define ATOMIC_LOAD_IMPL(name, type) \
1152 static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
1153 { \
1154 unsigned state = irq_disable(); \
1155 type result = *var; \
1156 irq_restore(state); \
1157 return result; \
1158 }
1159
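/* For illustration: ATOMIC_LOAD_IMPL(u8, uint8_t) below expands to roughly
 * the following, with CONCAT() pasting the function name together:
 *
 *     static inline uint8_t atomic_load_u8(const volatile uint8_t *var)
 *     {
 *         unsigned state = irq_disable();
 *         uint8_t result = *var;
 *         irq_restore(state);
 *         return result;
 *     }
 */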
1160#ifndef HAS_ATOMIC_LOAD_U8
1161ATOMIC_LOAD_IMPL(u8, uint8_t)
1162#endif
1163#ifndef HAS_ATOMIC_LOAD_U16
1164ATOMIC_LOAD_IMPL(u16, uint16_t)
1165#endif
1166#ifndef HAS_ATOMIC_LOAD_U32
1167ATOMIC_LOAD_IMPL(u32, uint32_t)
1168#endif
1169#ifndef HAS_ATOMIC_LOAD_U64
1170ATOMIC_LOAD_IMPL(u64, uint64_t)
1171#endif
1172
1180#define ATOMIC_STORE_IMPL(name, type) \
1181 static inline void CONCAT(atomic_store_, name) \
1182 (volatile type *dest, type val) \
1183 { \
1184 unsigned state = irq_disable(); \
1185 *dest = val; \
1186 irq_restore(state); \
1187 }
1188
1189#ifndef HAS_ATOMIC_STORE_U8
1190ATOMIC_STORE_IMPL(u8, uint8_t)
1191#endif
1192#ifndef HAS_ATOMIC_STORE_U16
1193ATOMIC_STORE_IMPL(u16, uint16_t)
1194#endif
1195#ifndef HAS_ATOMIC_STORE_U32
1196ATOMIC_STORE_IMPL(u32, uint32_t)
1197#endif
1198#ifndef HAS_ATOMIC_STORE_U64
1199ATOMIC_STORE_IMPL(u64, uint64_t)
1200#endif
1201
1211#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
1212 static inline type CONCAT4(atomic_fetch_, opname, _, name) \
1213 (volatile type *dest, type val) \
1214 { \
1215 unsigned state = irq_disable(); \
1216 const type result = *dest; \
1217 *dest = result op val; \
1218 irq_restore(state); \
1219 return result; \
1220 }
1221
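/* For illustration: ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t) below expands
 * to roughly the following, with CONCAT4() assembling the name
 * atomic_fetch_add_u16:
 *
 *     static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
 *                                                 uint16_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint16_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 */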
1222#ifndef HAS_ATOMIC_FETCH_ADD_U8
1223ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
1224#endif
1225#ifndef HAS_ATOMIC_FETCH_ADD_U16
1226ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
1227#endif
1228#ifndef HAS_ATOMIC_FETCH_ADD_U32
1229ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
1230#endif
1231#ifndef HAS_ATOMIC_FETCH_ADD_U64
1232ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
1233#endif
1234
1235#ifndef HAS_ATOMIC_FETCH_SUB_U8
1236ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
1237#endif
1238#ifndef HAS_ATOMIC_FETCH_SUB_U16
1239ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
1240#endif
1241#ifndef HAS_ATOMIC_FETCH_SUB_U32
1242ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
1243#endif
1244#ifndef HAS_ATOMIC_FETCH_SUB_U64
1245ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
1246#endif
1247
1248#ifndef HAS_ATOMIC_FETCH_OR_U8
1249ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
1250#endif
1251#ifndef HAS_ATOMIC_FETCH_OR_U16
1252ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
1253#endif
1254#ifndef HAS_ATOMIC_FETCH_OR_U32
1255ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
1256#endif
1257#ifndef HAS_ATOMIC_FETCH_OR_U64
1258ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
1259#endif
1260
1261#ifndef HAS_ATOMIC_FETCH_XOR_U8
1262ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
1263#endif
1264#ifndef HAS_ATOMIC_FETCH_XOR_U16
1265ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
1266#endif
1267#ifndef HAS_ATOMIC_FETCH_XOR_U32
1268ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
1269#endif
1270#ifndef HAS_ATOMIC_FETCH_XOR_U64
1271ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
1272#endif
1273
1274#ifndef HAS_ATOMIC_FETCH_AND_U8
1275ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
1276#endif
1277#ifndef HAS_ATOMIC_FETCH_AND_U16
1278ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
1279#endif
1280#ifndef HAS_ATOMIC_FETCH_AND_U32
1281ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
1282#endif
1283#ifndef HAS_ATOMIC_FETCH_AND_U64
1284ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
1285#endif
1286
1287#ifndef HAS_ATOMIC_BIT
1288static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
1289 uint8_t bit)
1290{
1291 atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
1292 return result;
1293}
1294static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
1295 uint8_t bit)
1296{
1297 atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
1298 return result;
1299}
1300static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
1301 uint8_t bit)
1302{
1303 atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
1304 return result;
1305}
1306static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
1307 uint8_t bit)
1308{
1309 atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
1310 return result;
1311}
1312static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
1313{
1314 atomic_fetch_or_u8(bit.dest, bit.mask);
1315}
1316static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
1317{
1318 atomic_fetch_or_u16(bit.dest, bit.mask);
1319}
1320static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
1321{
1322 atomic_fetch_or_u32(bit.dest, bit.mask);
1323}
1324static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
1325{
1326 atomic_fetch_or_u64(bit.dest, bit.mask);
1327}
1328static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
1329{
1330 atomic_fetch_and_u8(bit.dest, ~bit.mask);
1331}
1332static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
1333{
1334 atomic_fetch_and_u16(bit.dest, ~bit.mask);
1335}
1336static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
1337{
1338 atomic_fetch_and_u32(bit.dest, ~bit.mask);
1339}
1340static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
1341{
1342 atomic_fetch_and_u64(bit.dest, ~bit.mask);
1343}
1344#endif
1345
1346/* Provide semi_atomic_*() functions on top.
1347 *
1348 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
1349 * - Else:
1350 * - If matching `atomic_store_u<BITS>()` is provided: Only make final
1351 * store atomic, as we can avoid touching the IRQ state register that
1352 * way
1353 * - Else: We need to disable and re-enable IRQs anyway, so we just use the
1354 * fallback implementation of `atomic_<FOO>()` for `semi_atomic_<FOO>()`
1355 * as well
1356 */
1357
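/* Usage sketch, assuming a hypothetical `bytes_sent` counter with a single
 * writer and any number of readers: the read-modify-write does not need to be
 * atomic as a whole, only the final store does, so the cheaper semi_atomic
 * variant is sufficient.
 *
 *     static volatile uint32_t bytes_sent;
 *
 *     void on_send(uint32_t len)       // only ever called from one thread
 *     {
 *         semi_atomic_fetch_add_u32(&bytes_sent, len);
 *     }
 *
 *     uint32_t stats_bytes_sent(void)  // safe from any context
 *     {
 *         return atomic_load_u32(&bytes_sent);
 *     }
 */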
1358/* FETCH_ADD */
1359#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
1360static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
1361 uint8_t val)
1362{
1363 return atomic_fetch_add_u8(dest, val);
1364}
1365#else
1366static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
1367 uint8_t val)
1368{
1369 uint8_t result = atomic_load_u8(dest);
1370 atomic_store_u8(dest, result + val);
1371 return result;
1372}
1373#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */
1374
1375#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
1376static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
1377 uint16_t val)
1378{
1379 return atomic_fetch_add_u16(dest, val);
1380}
1381#else
1382static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
1383 uint16_t val)
1384{
1385 uint16_t result = atomic_load_u16(dest);
1386 atomic_store_u16(dest, result + val);
1387 return result;
1388}
1389#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */
1390
1391#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
1392static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
1393 uint32_t val)
1394{
1395 return atomic_fetch_add_u32(dest, val);
1396}
1397#else
1398static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
1399 uint32_t val)
1400{
1401 uint32_t result = atomic_load_u32(dest);
1402 atomic_store_u32(dest, result + val);
1403 return result;
1404}
1405#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */
1406
1407#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
1408static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
1409 uint64_t val)
1410{
1411 return atomic_fetch_add_u64(dest, val);
1412}
1413#else
1414static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
1415 uint64_t val)
1416{
1417    uint64_t result = atomic_load_u64(dest);
        atomic_store_u64(dest, result + val);
        return result;
1418}
1419#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */
1420
1421/* FETCH_SUB */
1422#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
1423static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
1424 uint8_t val)
1425{
1426 return atomic_fetch_sub_u8(dest, val);
1427}
1428#else
1429static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
1430 uint8_t val)
1431{
1432 uint8_t result = atomic_load_u8(dest);
1433 atomic_store_u8(dest, result - val);
1434 return result;
1435}
1436#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */
1437
1438#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
1439static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
1440 uint16_t val)
1441{
1442 return atomic_fetch_sub_u16(dest, val);
1443}
1444#else
1445static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
1446 uint16_t val)
1447{
1448 uint16_t result = atomic_load_u16(dest);
1449 atomic_store_u16(dest, result - val);
1450 return result;
1451}
1452#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */
1453
1454#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
1455static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
1456 uint32_t val)
1457{
1458 return atomic_fetch_sub_u32(dest, val);
1459}
1460#else
1461static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
1462 uint32_t val)
1463{
1464 uint32_t result = atomic_load_u32(dest);
1465 atomic_store_u32(dest, result - val);
1466 return result;
1467}
1468#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */
1469
1470#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
1471static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
1472 uint64_t val)
1473{
1474 return atomic_fetch_sub_u64(dest, val);
1475}
1476#else
1477static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
1478 uint64_t val)
1479{
1480 uint64_t result = atomic_load_u64(dest);
1481 atomic_store_u64(dest, result - val);
1482 return result;
1483}
1484#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */
1485
1486/* FETCH_OR */
1487#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1488static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
1489 uint8_t val)
1490{
1491 return atomic_fetch_or_u8(dest, val);
1492}
1493#else
1494static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
1495 uint8_t val)
1496{
1497 uint8_t result = atomic_load_u8(dest);
1498 atomic_store_u8(dest, result | val);
1499 return result;
1500}
1501#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */
1502
1503#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1504static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1505 uint16_t val)
1506{
1507 return atomic_fetch_or_u16(dest, val);
1508}
1509#else
1510static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1511 uint16_t val)
1512{
1513 uint16_t result = atomic_load_u16(dest);
1514 atomic_store_u16(dest, result | val);
1515 return result;
1516}
1517#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */
1518
1519#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1520static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1521 uint32_t val)
1522{
1523 return atomic_fetch_or_u32(dest, val);
1524}
1525#else
1526static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1527 uint32_t val)
1528{
1529 uint32_t result = atomic_load_u32(dest);
1530 atomic_store_u32(dest, result | val);
1531 return result;
1532}
1533#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */
1534
1535#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1536static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1537 uint64_t val)
1538{
1539 return atomic_fetch_or_u64(dest, val);
1540}
1541#else
1542static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1543 uint64_t val)
1544{
1545 uint64_t result = atomic_load_u64(dest);
1546 atomic_store_u64(dest, result | val);
1547 return result;
1548}
1549#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */
1550
1551/* FETCH_XOR */
1552#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1553static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1554 uint8_t val)
1555{
1556 return atomic_fetch_xor_u8(dest, val);
1557}
1558#else
1559static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1560 uint8_t val)
1561{
1562 uint8_t result = atomic_load_u8(dest);
1563 atomic_store_u8(dest, result ^ val);
1564 return result;
1565}
1566#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */
1567
1568#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1569static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1570 uint16_t val)
1571{
1572 return atomic_fetch_xor_u16(dest, val);
1573}
1574#else
1575static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1576 uint16_t val)
1577{
1578 uint16_t result = atomic_load_u16(dest);
1579 atomic_store_u16(dest, result ^ val);
1580 return result;
1581}
1582#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */
1583
1584#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1585static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1586 uint32_t val)
1587{
1588 return atomic_fetch_xor_u32(dest, val);
1589}
1590#else
1591static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1592 uint32_t val)
1593{
1594 uint32_t result = atomic_load_u32(dest);
1595 atomic_store_u32(dest, result ^ val);
1596 return result;
1597}
1598#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */
1599
1600#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1601static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1602 uint64_t val)
1603{
1604 return atomic_fetch_xor_u64(dest, val);
1605}
1606#else
1607static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1608 uint64_t val)
1609{
1610 uint64_t result = atomic_load_u64(dest);
1611 atomic_store_u64(dest, result ^ val);
1612 return result;
1613}
1614#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */
1615
1616/* FETCH_AND */
1617#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
1618static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1619 uint8_t val)
1620{
1621 return atomic_fetch_and_u8(dest, val);
1622}
1623#else
1624static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1625 uint8_t val)
1626{
1627 uint8_t result = atomic_load_u8(dest);
1628 atomic_store_u8(dest, result & val);
1629 return result;
1630}
1631#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */
1632
1633#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
1634static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1635 uint16_t val)
1636{
1637 return atomic_fetch_and_u16(dest, val);
1638}
1639#else
1640static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1641 uint16_t val)
1642{
1643 uint16_t result = atomic_load_u16(dest);
1644 atomic_store_u16(dest, result & val);
1645 return result;
1646}
1647#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */
1648
1649#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
1650static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1651 uint32_t val)
1652{
1653 return atomic_fetch_and_u32(dest, val);
1654}
1655#else
1656static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1657 uint32_t val)
1658{
1659 uint32_t result = atomic_load_u32(dest);
1660 atomic_store_u32(dest, result & val);
1661 return result;
1662}
1663#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */
1664
1665#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
1666static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1667 uint64_t val)
1668{
1669 return atomic_fetch_and_u64(dest, val);
1670}
1671#else
1672static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1673 uint64_t val)
1674{
1675 uint64_t result = atomic_load_u64(dest);
1676 atomic_store_u64(dest, result & val);
1677 return result;
1678}
1679#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
1680
1681#ifdef __cplusplus
1682}
1683#endif
1684
1685/* NOLINTEND(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name) */