atomic_utils.h
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

#ifndef ATOMIC_UTILS_H
#define ATOMIC_UTILS_H

#include <stdint.h>

#include "irq.h"
#include "sched.h"

#include "atomic_utils_arch.h"

#ifdef __cplusplus
extern "C" {
#endif

/* Declarations and documentation: */

#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/**
 * @brief   Type specifying a bit in an uint8_t
 */
typedef struct {
    volatile uint8_t *dest;     /**< Memory containing the bit to set/clear */
    uint8_t mask;               /**< Bitmask used for setting the bit */
} atomic_bit_u8_t;

/**
 * @brief   Type specifying a bit in an uint16_t
 */
typedef struct {
    volatile uint16_t *dest;    /**< Memory containing the bit to set/clear */
    uint16_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u16_t;

/**
 * @brief   Type specifying a bit in an uint32_t
 */
typedef struct {
    volatile uint32_t *dest;    /**< Memory containing the bit to set/clear */
    uint32_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u32_t;

/**
 * @brief   Type specifying a bit in an uint64_t
 */
typedef struct {
    volatile uint64_t *dest;    /**< Memory containing the bit to set/clear */
    uint64_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u64_t;
#endif /* HAS_ATOMIC_BIT */
208 
219 static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
226 static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
233 static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
240 static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
241 
/** @brief  Load an `uintptr_t` atomically */
static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var) {
    if (sizeof(uintptr_t) == 2) {
        return atomic_load_u16((const volatile uint16_t *)var);
    }
    else if (sizeof(uintptr_t) == 4) {
        return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
    }
    else {
        return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
    }
}
/** @brief  Load a `void *` atomically */
static inline void * atomic_load_ptr(void **ptr_addr) {
    return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
}
/** @brief  Load a `kernel_pid_t` atomically */
static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
{
    return atomic_load_u16((const volatile uint16_t *)var);
}
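
/*
 * Illustrative usage sketch (not part of this header's API; `counter` and
 * `reader` are hypothetical): on MCUs where a 32-bit access is not
 * guaranteed to be a single bus transaction, atomic_load_u32() prevents a
 * reader from observing a torn value while e.g. an ISR updates it:
 *
 *     static volatile uint32_t counter;
 *
 *     static uint32_t reader(void)
 *     {
 *         return atomic_load_u32(&counter);
 *     }
 */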

/** @brief  Store an `uint8_t` atomically */
static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
/** @brief  Store an `uint16_t` atomically */
static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
/** @brief  Store an `uint32_t` atomically */
static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
/** @brief  Store an `uint64_t` atomically */
static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);

/** @brief  Store an `uintptr_t` atomically */
static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
{
    if (sizeof(uintptr_t) == 2) {
        atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
    }
    else if (sizeof(uintptr_t) == 4) {
        atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
    }
    else {
        atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
    }
}
/** @brief  Store a `void *` atomically */
static inline void atomic_store_ptr(void **dest, const void *val) {
    atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
}
/** @brief  Store a `kernel_pid_t` atomically */
static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
                                           kernel_pid_t val)
{
    atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
}
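
/*
 * Illustrative usage sketch (hypothetical `shared_buf` and `publish`):
 * publishing a pointer with atomic_store_ptr() guarantees that a concurrent
 * atomic_load_ptr() never sees a half-updated address:
 *
 *     static void *shared_buf;
 *
 *     static void publish(void *buf)
 *     {
 *         atomic_store_ptr(&shared_buf, buf);
 *     }
 */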

/* Atomically add a value onto a given value, returning the old value: */
static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
                                          uint8_t summand);
static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
                                            uint16_t summand);
static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
                                            uint32_t summand);
static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
                                            uint64_t summand);

/* Atomically subtract a value from a given value, returning the old value: */
static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
                                          uint8_t subtrahend);
static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
                                            uint16_t subtrahend);
static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
                                            uint32_t subtrahend);
static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
                                            uint64_t subtrahend);

/* Atomic version of `*dest |= val`, returning the old value: */
static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
                                           uint16_t val);
static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
                                           uint32_t val);
static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
                                           uint64_t val);

/* Atomic version of `*dest ^= val`, returning the old value: */
static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
                                            uint64_t val);

/* Atomic version of `*dest &= val`, returning the old value: */
static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
                                            uint16_t val);
static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
                                            uint32_t val);
static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
                                            uint64_t val);

/* Create a reference to a bit in an `uint<width>_t`: */
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit);
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit);
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit);
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit);

/* Atomic version of `*dest |= (1 << bit)`: */
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);

/* Atomic version of `*dest &= ~(1 << bit)`: */
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
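
/*
 * Illustrative usage sketch (hypothetical `flags` variable and bit index):
 * the bit reference is created once and can then be set or cleared
 * repeatedly; platforms may map this onto hardware support (e.g.
 * bit-banding) via atomic_utils_arch.h:
 *
 *     static volatile uint8_t flags;
 *
 *     static void signal_ready(void)
 *     {
 *         atomic_bit_u8_t ready = atomic_bit_u8(&flags, 3);
 *         atomic_set_bit_u8(ready);
 *     }
 */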

/* Semi-atomically add a value onto a given value, returning the old value: */
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t summand);
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t summand);
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t summand);
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t summand);

/* Semi-atomically subtract a value from a given value, returning the old
 * value: */
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t subtrahend);
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t subtrahend);
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t subtrahend);
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t subtrahend);

/* Semi-atomic version of `*dest |= val`, returning the old value: */
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val);
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val);
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val);
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val);

/* Semi-atomic version of `*dest ^= val`, returning the old value: */
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val);

/* Semi-atomic version of `*dest &= val`, returning the old value: */
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val);
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val);
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val);
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val);
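
/*
 * Illustrative note (reasoning, not additional API): the semi-atomic
 * variants only guarantee that readers never observe a torn or intermediate
 * value; concurrent writers may lose updates. A hypothetical single-writer
 * statistics counter is therefore a typical fit:
 *
 *     static volatile uint32_t rx_count;
 *
 *     static void on_rx_frame(void)   // only writer of rx_count
 *     {
 *         semi_atomic_fetch_add_u32(&rx_count, 1);
 *     }
 */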

/* Fallback implementations of atomic utility functions: */

/** @brief  Concatenate two tokens */
#define CONCAT(a, b) a ## b

/** @brief  Concatenate four tokens */
#define CONCAT4(a, b, c, d) a ## b ## c ## d

/**
 * @brief   Generates a static inline function implementing
 *          atomic_load_u<width>()
 */
#define ATOMIC_LOAD_IMPL(name, type) \
    static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
    { \
        unsigned state = irq_disable(); \
        type result = *var; \
        irq_restore(state); \
        return result; \
    }

#ifndef HAS_ATOMIC_LOAD_U8
ATOMIC_LOAD_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U16
ATOMIC_LOAD_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U32
ATOMIC_LOAD_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_LOAD_U64
ATOMIC_LOAD_IMPL(u64, uint64_t)
#endif

/**
 * @brief   Generates a static inline function implementing
 *          atomic_store_u<width>()
 */
#define ATOMIC_STORE_IMPL(name, type) \
    static inline void CONCAT(atomic_store_, name) \
    (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        *dest = val; \
        irq_restore(state); \
    }

#ifndef HAS_ATOMIC_STORE_U8
ATOMIC_STORE_IMPL(u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_STORE_U16
ATOMIC_STORE_IMPL(u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_STORE_U32
ATOMIC_STORE_IMPL(u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_STORE_U64
ATOMIC_STORE_IMPL(u64, uint64_t)
#endif

/**
 * @brief   Generates a static inline function implementing
 *          atomic_fetch_<op>_u<width>()
 */
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
    static inline type CONCAT4(atomic_fetch_, opname, _, name) \
    (volatile type *dest, type val) \
    { \
        unsigned state = irq_disable(); \
        const type result = *dest; \
        *dest = result op val; \
        irq_restore(state); \
        return result; \
    }

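/*
 * For illustration: ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t) below expands
 * (modulo whitespace) to
 *
 *     static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
 *                                               uint8_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint8_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 */
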
#ifndef HAS_ATOMIC_FETCH_ADD_U8
ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U16
ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U32
ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_ADD_U64
ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_SUB_U8
ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U16
ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U32
ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_SUB_U64
ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_OR_U8
ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U16
ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U32
ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_OR_U64
ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_XOR_U8
ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U16
ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U32
ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_XOR_U64
ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_FETCH_AND_U8
ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U16
ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U32
ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
#endif
#ifndef HAS_ATOMIC_FETCH_AND_U64
ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
#endif

#ifndef HAS_ATOMIC_BIT
static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
                                            uint8_t bit)
{
    atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
    return result;
}
static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
    return result;
}
static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
                                              uint8_t bit)
{
    atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
    return result;
}
static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_or_u8(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_or_u16(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_or_u32(bit.dest, bit.mask);
}
static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_or_u64(bit.dest, bit.mask);
}
static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
{
    atomic_fetch_and_u8(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
{
    atomic_fetch_and_u16(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
{
    atomic_fetch_and_u32(bit.dest, ~bit.mask);
}
static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
{
    atomic_fetch_and_u64(bit.dest, ~bit.mask);
}
#endif

/* Provide semi_atomic_*() functions on top.
 *
 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
 * - Else:
 *   - If matching `atomic_store_u<BITS>()` is provided: Only make the final
 *     store atomic, as we can avoid touching the IRQ state register that way
 *   - Else: We need to disable and re-enable IRQs anyway, so we just use the
 *     fallback implementation of `atomic_<FOO>()` for `semi_atomic_<FOO>()`
 *     as well
 */

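/*
 * Illustrative consequence of the rules above (hypothetical configuration):
 * on a platform that provides atomic_store_u8() but no native
 * atomic_fetch_add_u8(), semi_atomic_fetch_add_u8() effectively becomes
 *
 *     uint8_t result = atomic_load_u8(dest);   // plain load
 *     atomic_store_u8(dest, result + val);     // single atomic store
 *     return result;
 *
 * i.e. only the final store is atomic, avoiding the IRQ disable/restore
 * pair of the full fallback.
 */
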
/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */

/* FETCH_AND */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */

#ifdef __cplusplus
}
#endif

#endif /* ATOMIC_UTILS_H */