atomic_utils.h
Go to the documentation of this file.
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */
8 
136 #ifndef ATOMIC_UTILS_H
137 #define ATOMIC_UTILS_H
138 
139 #include <stdint.h>
140 
141 #include "irq.h"
142 #include "atomic_utils_arch.h"
143 
144 #ifdef __cplusplus
145 extern "C" {
146 #endif
147 
/* Declarations and documentation: */
149 
#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
/**
 * @brief   Type specifying a bit in an uint8_t
 */
typedef struct {
    volatile uint8_t *dest;     /**< Memory containing the bit to set/clear */
    uint8_t mask;               /**< Bitmask used for setting the bit */
} atomic_bit_u8_t;

/**
 * @brief   Type specifying a bit in an uint16_t
 */
typedef struct {
    volatile uint16_t *dest;    /**< Memory containing the bit to set/clear */
    uint16_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u16_t;

/**
 * @brief   Type specifying a bit in an uint32_t
 */
typedef struct {
    volatile uint32_t *dest;    /**< Memory containing the bit to set/clear */
    uint32_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u32_t;

/**
 * @brief   Type specifying a bit in an uint64_t
 */
typedef struct {
    volatile uint64_t *dest;    /**< Memory containing the bit to set/clear */
    uint64_t mask;              /**< Bitmask used for setting the bit */
} atomic_bit_u64_t;
#endif /* HAS_ATOMIC_BIT */
206 
217 static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
224 static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
231 static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
238 static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
250 static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
256 static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
262 static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
268 static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);
281 static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
282  uint8_t summand);
289 static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
290  uint16_t summand);
297 static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
298  uint32_t summand);
305 static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
306  uint64_t summand);
320 static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
321  uint8_t subtrahend);
329 static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
330  uint16_t subtrahend);
338 static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
339  uint32_t subtrahend);
347 static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
348  uint64_t subtrahend);
362 static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
370 static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
371  uint16_t val);
379 static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
380  uint32_t val);
388 static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
389  uint64_t val);
403 static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
411 static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
412  uint16_t val);
420 static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
421  uint32_t val);
429 static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
430  uint64_t val);
444 static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
452 static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
453  uint16_t val);
461 static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
462  uint32_t val);
470 static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
471  uint64_t val);
483 static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
484  uint8_t bit);
485 
491 static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
492  uint8_t bit);
493 
499 static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
500  uint8_t bit);
501 
507 static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
508  uint8_t bit);
519 static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
524 static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
529 static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
534 static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
545 static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
550 static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
555 static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
560 static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
574 static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
575  uint8_t summand);
583 static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
584  uint16_t summand);
592 static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
593  uint32_t summand);
601 static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
602  uint64_t summand);
616 static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
617  uint8_t subtrahend);
625 static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
626  uint16_t subtrahend);
634 static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
635  uint32_t subtrahend);
643 static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
644  uint64_t subtrahend);
658 static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
666 static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
667  uint16_t val);
675 static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
676  uint32_t val);
684 static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
685  uint64_t val);
699 static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
700  uint8_t val);
708 static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
709  uint16_t val);
717 static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
718  uint32_t val);
726 static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
727  uint64_t val);
741 static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
742  uint8_t val);
750 static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
751  uint16_t val);
759 static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
760  uint32_t val);
768 static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
769  uint64_t val);
/* Fallback implementations of atomic utility functions: */
773 
/**
 * @brief   Concatenate the two tokens @p a and @p b
 */
#define CONCAT(a, b) a ## b
778 
/**
 * @brief   Concatenate the four tokens @p a, @p b, @p c, and @p d
 */
#define CONCAT4(a, b, c, d) a ## b ## c ## d
783 
791 #define ATOMIC_LOAD_IMPL(name, type) \
792  static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
793  { \
794  unsigned state = irq_disable(); \
795  type result = *var; \
796  irq_restore(state); \
797  return result; \
798  }
799 
800 #ifndef HAS_ATOMIC_LOAD_U8
801 ATOMIC_LOAD_IMPL(u8, uint8_t)
802 #endif
803 #ifndef HAS_ATOMIC_LOAD_U16
804 ATOMIC_LOAD_IMPL(u16, uint16_t)
805 #endif
806 #ifndef HAS_ATOMIC_LOAD_U32
807 ATOMIC_LOAD_IMPL(u32, uint32_t)
808 #endif
809 #ifndef HAS_ATOMIC_LOAD_U64
810 ATOMIC_LOAD_IMPL(u64, uint64_t)
811 #endif
812 
820 #define ATOMIC_STORE_IMPL(name, type) \
821  static inline void CONCAT(atomic_store_, name) \
822  (volatile type *dest, type val) \
823  { \
824  unsigned state = irq_disable(); \
825  *dest = val; \
826  irq_restore(state); \
827  }
828 
829 #ifndef HAS_ATOMIC_STORE_U8
830 ATOMIC_STORE_IMPL(u8, uint8_t)
831 #endif
832 #ifndef HAS_ATOMIC_STORE_U16
833 ATOMIC_STORE_IMPL(u16, uint16_t)
834 #endif
835 #ifndef HAS_ATOMIC_STORE_U32
836 ATOMIC_STORE_IMPL(u32, uint32_t)
837 #endif
838 #ifndef HAS_ATOMIC_STORE_U64
839 ATOMIC_STORE_IMPL(u64, uint64_t)
840 #endif
841 
851 #define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
852  static inline type CONCAT4(atomic_fetch_, opname, _, name) \
853  (volatile type *dest, type val) \
854  { \
855  unsigned state = irq_disable(); \
856  const type result = *dest; \
857  *dest = result op val; \
858  irq_restore(state); \
859  return result; \
860  }
861 
862 #ifndef HAS_ATOMIC_FETCH_ADD_U8
863 ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
864 #endif
865 #ifndef HAS_ATOMIC_FETCH_ADD_U16
866 ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
867 #endif
868 #ifndef HAS_ATOMIC_FETCH_ADD_U32
869 ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
870 #endif
871 #ifndef HAS_ATOMIC_FETCH_ADD_U64
872 ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
873 #endif
874 
875 #ifndef HAS_ATOMIC_FETCH_SUB_U8
876 ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
877 #endif
878 #ifndef HAS_ATOMIC_FETCH_SUB_U16
879 ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
880 #endif
881 #ifndef HAS_ATOMIC_FETCH_SUB_U32
882 ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
883 #endif
884 #ifndef HAS_ATOMIC_FETCH_SUB_U64
885 ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
886 #endif
887 
888 #ifndef HAS_ATOMIC_FETCH_OR_U8
889 ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
890 #endif
891 #ifndef HAS_ATOMIC_FETCH_OR_U16
892 ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
893 #endif
894 #ifndef HAS_ATOMIC_FETCH_OR_U32
895 ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
896 #endif
897 #ifndef HAS_ATOMIC_FETCH_OR_U64
898 ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
899 #endif
900 
901 #ifndef HAS_ATOMIC_FETCH_XOR_U8
902 ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
903 #endif
904 #ifndef HAS_ATOMIC_FETCH_XOR_U16
905 ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
906 #endif
907 #ifndef HAS_ATOMIC_FETCH_XOR_U32
908 ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
909 #endif
910 #ifndef HAS_ATOMIC_FETCH_XOR_U64
911 ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
912 #endif
913 
914 #ifndef HAS_ATOMIC_FETCH_AND_U8
915 ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
916 #endif
917 #ifndef HAS_ATOMIC_FETCH_AND_U16
918 ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
919 #endif
920 #ifndef HAS_ATOMIC_FETCH_AND_U32
921 ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
922 #endif
923 #ifndef HAS_ATOMIC_FETCH_AND_U64
924 ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
925 #endif
926 
927 #ifndef HAS_ATOMIC_BIT
928 static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
929  uint8_t bit)
930 {
931  atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
932  return result;
933 }
934 static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
935  uint8_t bit)
936 {
937  atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
938  return result;
939 }
940 static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
941  uint8_t bit)
942 {
943  atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
944  return result;
945 }
946 static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
947  uint8_t bit)
948 {
949  atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
950  return result;
951 }
952 static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
953 {
954  atomic_fetch_or_u8(bit.dest, bit.mask);
955 }
956 static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
957 {
958  atomic_fetch_or_u16(bit.dest, bit.mask);
959 }
960 static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
961 {
962  atomic_fetch_or_u32(bit.dest, bit.mask);
963 }
964 static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
965 {
966  atomic_fetch_or_u64(bit.dest, bit.mask);
967 }
968 static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
969 {
970  atomic_fetch_and_u8(bit.dest, ~bit.mask);
971 }
972 static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
973 {
974  atomic_fetch_and_u16(bit.dest, ~bit.mask);
975 }
976 static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
977 {
978  atomic_fetch_and_u32(bit.dest, ~bit.mask);
979 }
980 static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
981 {
982  atomic_fetch_and_u64(bit.dest, ~bit.mask);
983 }
984 #endif
985 
/* Provide semi_atomic_*() functions on top.
 *
 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
 * - Else:
 *      - If matching `atomic_store_u<BITS>()` is provided: Only make final
 *        store atomic, as we can avoid touching the IRQ state register that
 *        way
 *      - Else: We need to disable and re-enable IRQs anyway, we just use the
 *        fallback implementation of `atomic_<FOO>()` for `semi_atomic<FOO>()`
 *        as well
 */
997 
/* FETCH_ADD */
#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
/* A fully atomic fetch-add exists (or no cheaper store-only path does),
 * so just forward to the atomic implementation. */
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_add_u8(dest, val);
}
#else
/* Only the final store is atomic: the load-add-store sequence may be
 * interleaved, which is the documented semi-atomic contract. */
static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_add_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_add_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_add_u64(dest, val);
}
#else
/* Fixed: the previous fallback read *dest non-atomically and fell off the
 * end of a non-void function (undefined behavior). Load atomically, store
 * atomically, and return the pre-add value like every sibling does. */
static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result + val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */
1060 
/* FETCH_SUB */
#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
/* Forward to the fully atomic implementation where available. */
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_sub_u8(dest, val);
}
#else
/* Only the final store is atomic (semi-atomic contract). */
static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_sub_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_sub_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_sub_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result - val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */
1125 
/* FETCH_OR */
#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
/* Forward to the fully atomic implementation where available. */
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    return atomic_fetch_or_u8(dest, val);
}
#else
/* Only the final store is atomic (semi-atomic contract). */
static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
                                              uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    return atomic_fetch_or_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
                                                uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    return atomic_fetch_or_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
                                                uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    return atomic_fetch_or_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
                                                uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result | val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */
1190 
/* FETCH_XOR */
#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
/* Forward to the fully atomic implementation where available. */
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_xor_u8(dest, val);
}
#else
/* Only the final store is atomic (semi-atomic contract). */
static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_xor_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_xor_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_xor_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result ^ val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */
1255 
/* FETCH_AND */
#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
/* Forward to the fully atomic implementation where available. */
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    return atomic_fetch_and_u8(dest, val);
}
#else
/* Only the final store is atomic (semi-atomic contract). */
static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
                                               uint8_t val)
{
    uint8_t result = atomic_load_u8(dest);
    atomic_store_u8(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */

#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    return atomic_fetch_and_u16(dest, val);
}
#else
static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
                                                 uint16_t val)
{
    uint16_t result = atomic_load_u16(dest);
    atomic_store_u16(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */

#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    return atomic_fetch_and_u32(dest, val);
}
#else
static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
                                                 uint32_t val)
{
    uint32_t result = atomic_load_u32(dest);
    atomic_store_u32(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */

#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    return atomic_fetch_and_u64(dest, val);
}
#else
static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
                                                 uint64_t val)
{
    uint64_t result = atomic_load_u64(dest);
    atomic_store_u64(dest, result & val);
    return result;
}
#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
1320 
1321 #ifdef __cplusplus
1322 }
1323 #endif
1324 
1325 #endif /* ATOMIC_UTILS_H */
static atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest, uint8_t bit)
Create a reference to a bit in an uint32_t
Definition: atomic_utils.h:940
static uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Atomically subtract a value from a given value.
static void atomic_store_u8(volatile uint8_t *dest, uint8_t val)
Store an uint8_t atomically.
static uint32_t atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest |= val
static uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest &= val
static void atomic_set_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:952
#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type)
Generates a static inline function implementing atomic_fetch_<op>_u<width>()
Definition: atomic_utils.h:851
static uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Semi-atomically add a value onto a given value.
static atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest, uint8_t bit)
Create a reference to a bit in an uint64_t
Definition: atomic_utils.h:946
static uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest ^= val
#define ATOMIC_LOAD_IMPL(name, type)
Generates a static inline function implementing atomic_load_u<width>()
Definition: atomic_utils.h:791
static void atomic_clear_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:976
static uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest ^= val
static uint32_t atomic_load_u32(const volatile uint32_t *var)
Load an uint32_t atomically.
static uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Semi-atomically add a value onto a given value.
static uint32_t atomic_fetch_and_u32(volatile uint32_t *dest, uint32_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest &= val
static uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest ^= val
static void atomic_store_u64(volatile uint64_t *dest, uint64_t val)
Store an uint64_t atomically.
static uint16_t atomic_load_u16(const volatile uint16_t *var)
Load an uint16_t atomically.
static uint64_t atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest &= val
static uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest ^= val
static uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest ^= val
static uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest |= val
static uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest ^= val
static void atomic_set_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:964
static void atomic_clear_bit_u8(atomic_bit_u8_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:968
static uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint8_t atomic_load_u8(const volatile uint8_t *var)
Load an uint8_t atomically.
static atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest, uint8_t bit)
Create a reference to a bit in an uint8_t
Definition: atomic_utils.h:928
static uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest |= val
static uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest, uint16_t subtrahend)
Atomically subtract a value from a given value.
static uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Atomically subtract a value from a given value.
static uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Semi-atomically subtract a value from a given value.
static uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest, uint64_t val)
Semi-atomic version of *dest &= val
static uint8_t atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t summand)
Atomically add a value onto a given value.
static void atomic_clear_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:972
static uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest, uint64_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_store_u16(volatile uint16_t *dest, uint16_t val)
Store an uint16_t atomically.
static void atomic_store_u32(volatile uint32_t *dest, uint32_t val)
Store an uint32_t atomically.
static atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest, uint8_t bit)
Create a reference to a bit in an uint16_t
Definition: atomic_utils.h:934
#define ATOMIC_STORE_IMPL(name, type)
Generates a static inline function implementing atomic_store_u<width>()
Definition: atomic_utils.h:820
static uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest, uint32_t val)
Semi-atomic version of *dest ^= val
static uint16_t atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Atomic version of *dest |= val
static void atomic_set_bit_u32(atomic_bit_u32_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:960
static uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest &= val
static uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest, uint32_t subtrahend)
Atomically subtract a value from a given value.
static uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Semi-atomically add a value onto a given value.
static uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Semi-atomically add a value onto a given value.
static void atomic_clear_bit_u64(atomic_bit_u64_t bit)
Atomic version of *dest &= ~(1 << bit)
Definition: atomic_utils.h:980
static uint64_t atomic_fetch_add_u64(volatile uint64_t *dest, uint64_t summand)
Atomically add a value onto a given value.
static uint16_t atomic_fetch_add_u16(volatile uint16_t *dest, uint16_t summand)
Atomically add a value onto a given value.
static uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest, uint8_t subtrahend)
Semi-atomically subtract a value from a given value.
static void atomic_set_bit_u16(atomic_bit_u16_t bit)
Atomic version of *dest |= (1 << bit)
Definition: atomic_utils.h:956
static uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest |= val
static uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest |= val
static uint64_t atomic_fetch_or_u64(volatile uint64_t *dest, uint64_t val)
Atomic version of *dest |= val
static uint64_t atomic_load_u64(const volatile uint64_t *var)
Load an uint64_t atomically.
static uint32_t atomic_fetch_add_u32(volatile uint32_t *dest, uint32_t summand)
Atomically add a value onto a given value.
static uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest, uint16_t val)
Semi-atomic version of *dest ^= val
static uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest |= val
static uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Semi-atomic version of *dest &= val
static uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
Atomic version of *dest &= val
IRQ driver interface.
Type specifying a bit in an uint16_t
Definition: atomic_utils.h:180
uint16_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:182
volatile uint16_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:181
Type specifying a bit in an uint32_t
Definition: atomic_utils.h:190
volatile uint32_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:191
uint32_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:192
Type specifying a bit in an uint64_t
Definition: atomic_utils.h:200
volatile uint64_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:201
uint64_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:202
Type specifying a bit in an uint8_t
Definition: atomic_utils.h:170
uint8_t mask
Bitmask used for setting the bit.
Definition: atomic_utils.h:172
volatile uint8_t * dest
Memory containing the bit to set/clear.
Definition: atomic_utils.h:171