atomic_utils.h
1/*
2 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
3 *
4 * This file is subject to the terms and conditions of the GNU Lesser General
5 * Public License v2.1. See the file LICENSE in the top level directory for more
6 * details.
7 */
8
9#pragma once
10
137
138#include <limits.h>
139#include <stdint.h>
140
141#include "irq.h"
142#include "macros/utils.h"
143#include "sched.h"
144
145#include "atomic_utils_arch.h" /* IWYU pragma: export */
146
147#ifdef __cplusplus
148extern "C" {
149#endif
150
151/* NOLINTBEGIN(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name)
152 *
153 * The macros ATOMIC_LOAD_IMPL() and friends do not surround the argument used
154 * to pass the type with parentheses. Suppressing the clang-tidy warning here,
155 * as adding parentheses around a type would be a syntax error.
156 *
157 * The macro ATOMIC_FETCH_OP_IMPL() uses `val` as the argument name, but we
158 * want the declarations to be more specific (e.g. summand instead of val).
159 */
160
161/* Declarations and documentation: */
162
163#if !defined(HAS_ATOMIC_BIT) || defined(DOXYGEN)
183typedef struct {
184 volatile uint8_t *dest;
185 uint8_t mask;
186} atomic_bit_u8_t;
187
193typedef struct {
194 volatile uint16_t *dest;
195 uint16_t mask;
196} atomic_bit_u16_t;
197
203typedef struct {
204 volatile uint32_t *dest;
205 uint32_t mask;
206} atomic_bit_u32_t;
207
213typedef struct {
214 volatile uint64_t *dest;
215 uint64_t mask;
216} atomic_bit_u64_t;
217
219#endif /* HAS_ATOMIC_BIT */
220
224#if UINT_MAX == UINT16_MAX
225typedef atomic_bit_u16_t atomic_bit_unsigned_t;
226#elif UINT_MAX == UINT32_MAX
227typedef atomic_bit_u32_t atomic_bit_unsigned_t;
228#else
229typedef atomic_bit_u64_t atomic_bit_unsigned_t;
230#endif
231
242static inline uint8_t atomic_load_u8(const volatile uint8_t *var);
249static inline uint16_t atomic_load_u16(const volatile uint16_t *var);
256static inline uint32_t atomic_load_u32(const volatile uint32_t *var);
263static inline uint64_t atomic_load_u64(const volatile uint64_t *var);
274static inline unsigned atomic_load_unsigned(const volatile unsigned *var)
275{
276 if (sizeof(uint64_t) == sizeof(unsigned)) {
277 return atomic_load_u64((volatile void *)var);
278 }
279
280 if (sizeof(uint32_t) == sizeof(unsigned)) {
281 return atomic_load_u32((volatile void *)var);
282 }
283
284 return atomic_load_u16((volatile void *)var);
285}
286
293static inline uintptr_t atomic_load_uintptr(const volatile uintptr_t *var) {
294 if (sizeof(uintptr_t) == 2) {
295 return atomic_load_u16((const volatile uint16_t *)var);
296 }
297
298 if (sizeof(uintptr_t) == 4) {
299 return atomic_load_u32((const volatile uint32_t *)(uintptr_t)var);
300 }
301
302 return atomic_load_u64((const volatile uint64_t *)(uintptr_t)var);
303}
304
310static inline void * atomic_load_ptr(void **ptr_addr) {
311 return (void *)atomic_load_uintptr((const volatile uintptr_t *)ptr_addr);
312}
313
319static inline kernel_pid_t atomic_load_kernel_pid(const volatile kernel_pid_t *var)
320{
321 return (kernel_pid_t)atomic_load_u16((const volatile uint16_t *)var);
322}
323
324
334static inline void atomic_store_u8(volatile uint8_t *dest, uint8_t val);
340static inline void atomic_store_u16(volatile uint16_t *dest, uint16_t val);
346static inline void atomic_store_u32(volatile uint32_t *dest, uint32_t val);
352static inline void atomic_store_u64(volatile uint64_t *dest, uint64_t val);
362static inline void atomic_store_unsigned(volatile unsigned *dest, unsigned val)
363{
364 if (sizeof(uint64_t) == sizeof(unsigned)) {
365 atomic_store_u64((volatile void *)dest, val);
366 }
367 else if (sizeof(uint32_t) == sizeof(unsigned)) {
368 atomic_store_u32((volatile void *)dest, val);
369 }
370 else {
371 atomic_store_u16((volatile void *)dest, val);
372 }
373}
374
381static inline void atomic_store_uintptr(volatile uintptr_t *dest, uintptr_t val)
382{
383 if (sizeof(uintptr_t) == 2) {
384 atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
385 }
386 else if (sizeof(uintptr_t) == 4) {
387 atomic_store_u32((volatile uint32_t *)(uintptr_t)dest, (uint32_t)val);
388 }
389 else {
390 atomic_store_u64((volatile uint64_t *)(uintptr_t)dest, (uint64_t)val);
391 }
392}
393
399static inline void atomic_store_ptr(void **dest, const void *val) {
400 atomic_store_uintptr((volatile uintptr_t *)dest, (uintptr_t)val);
401}
402
408static inline void atomic_store_kernel_pid(volatile kernel_pid_t *dest,
409 kernel_pid_t val)
410{
411 atomic_store_u16((volatile uint16_t *)dest, (uint16_t)val);
412}
413
414
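/* Usage sketch (illustrative; `shared_status`, `status_isr` and `status_get`
 * are made-up names): the load/store helpers above target variables shared
 * between a thread and an ISR, where a plain access could be torn on some
 * platforms.
 *
 *     static volatile uint32_t shared_status;
 *
 *     void status_isr(void)           // interrupt context
 *     {
 *         atomic_store_u32(&shared_status, 0x42);
 *     }
 *
 *     uint32_t status_get(void)       // thread context
 *     {
 *         return atomic_load_u32(&shared_status);   // sees old or new value, never a mix
 *     }
 */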
425static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest,
426 uint8_t summand);
433static inline uint16_t atomic_fetch_add_u16(volatile uint16_t *dest,
434 uint16_t summand);
441static inline uint32_t atomic_fetch_add_u32(volatile uint32_t *dest,
442 uint32_t summand);
449static inline uint64_t atomic_fetch_add_u64(volatile uint64_t *dest,
450 uint64_t summand);
461static inline unsigned atomic_fetch_add_unsigned(volatile unsigned *dest,
462 unsigned summand)
463{
464 if (sizeof(unsigned) == sizeof(uint64_t)) {
465 return atomic_fetch_add_u64((volatile void *)dest, summand);
466 }
467
468 if (sizeof(unsigned) == sizeof(uint32_t)) {
469 return atomic_fetch_add_u32((volatile void *)dest, summand);
470 }
471
472 return atomic_fetch_add_u16((volatile void *)dest, summand);
473}
474
475
487static inline uint8_t atomic_fetch_sub_u8(volatile uint8_t *dest,
488 uint8_t subtrahend);
496static inline uint16_t atomic_fetch_sub_u16(volatile uint16_t *dest,
497 uint16_t subtrahend);
505static inline uint32_t atomic_fetch_sub_u32(volatile uint32_t *dest,
506 uint32_t subtrahend);
514static inline uint64_t atomic_fetch_sub_u64(volatile uint64_t *dest,
515 uint64_t subtrahend);
527static inline unsigned atomic_fetch_sub_unsigned(volatile unsigned *dest,
528 unsigned subtrahend)
529{
530 if (sizeof(unsigned) == sizeof(uint64_t)) {
531 return atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
532 }
533
534 if (sizeof(unsigned) == sizeof(uint32_t)) {
535 return atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
536 }
537
538 return atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
539}
540
541
553static inline uint8_t atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
561static inline uint16_t atomic_fetch_or_u16(volatile uint16_t *dest,
562 uint16_t val);
570static inline uint32_t atomic_fetch_or_u32(volatile uint32_t *dest,
571 uint32_t val);
579static inline uint64_t atomic_fetch_or_u64(volatile uint64_t *dest,
580 uint64_t val);
592static inline unsigned atomic_fetch_or_unsigned(volatile unsigned *dest,
593 unsigned val)
594{
595 if (sizeof(unsigned) == sizeof(uint64_t)) {
596 return atomic_fetch_or_u64((volatile void *)dest, val);
597 }
598
599 if (sizeof(unsigned) == sizeof(uint32_t)) {
600 return atomic_fetch_or_u32((volatile void *)dest, val);
601 }
602
603 return atomic_fetch_or_u16((volatile void *)dest, val);
604}
605
606
618static inline uint8_t atomic_fetch_xor_u8(volatile uint8_t *dest, uint8_t val);
626static inline uint16_t atomic_fetch_xor_u16(volatile uint16_t *dest,
627 uint16_t val);
635static inline uint32_t atomic_fetch_xor_u32(volatile uint32_t *dest,
636 uint32_t val);
644static inline uint64_t atomic_fetch_xor_u64(volatile uint64_t *dest,
645 uint64_t val);
657static inline unsigned atomic_fetch_xor_unsigned(volatile unsigned *dest,
658 unsigned val)
659{
660 if (sizeof(unsigned) == sizeof(uint64_t)) {
661 return atomic_fetch_xor_u64((volatile void *)dest, val);
662 }
663
664 if (sizeof(unsigned) == sizeof(uint32_t)) {
665 return atomic_fetch_xor_u32((volatile void *)dest, val);
666 }
667
668 return atomic_fetch_xor_u16((volatile void *)dest, val);
669}
670
671
683static inline uint8_t atomic_fetch_and_u8(volatile uint8_t *dest, uint8_t val);
691static inline uint16_t atomic_fetch_and_u16(volatile uint16_t *dest,
692 uint16_t val);
700static inline uint32_t atomic_fetch_and_u32(volatile uint32_t *dest,
701 uint32_t val);
709static inline uint64_t atomic_fetch_and_u64(volatile uint64_t *dest,
710 uint64_t val);
722static inline unsigned atomic_fetch_and_unsigned(volatile unsigned *dest,
723 unsigned val)
724{
725 if (sizeof(unsigned) == sizeof(uint64_t)) {
726 return atomic_fetch_and_u64((volatile void *)dest, val);
727 }
728
729 if (sizeof(unsigned) == sizeof(uint32_t)) {
730 return atomic_fetch_and_u32((volatile void *)dest, val);
731 }
732
733 return atomic_fetch_and_u16((volatile void *)dest, val);
734}
735
736
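/* Usage sketch (illustrative; `pending_events`, `EVENT_RX`, `mark_rx_event`
 * and `clear_rx_event` are made-up names): the fetch-<op> helpers return the
 * previous value, which allows e.g. detecting whether a flag was already set.
 *
 *     #define EVENT_RX 0x01
 *     static volatile uint8_t pending_events;
 *
 *     void mark_rx_event(void)
 *     {
 *         uint8_t before = atomic_fetch_or_u8(&pending_events, EVENT_RX);
 *         if (!(before & EVENT_RX)) {
 *             // flag was clear before; this call is the one that raised it
 *         }
 *     }
 *
 *     void clear_rx_event(void)
 *     {
 *         atomic_fetch_and_u8(&pending_events, (uint8_t)~EVENT_RX);
 *     }
 */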
748static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
749 uint8_t bit);
750
758static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
759 uint8_t bit);
760
768static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
769 uint8_t bit);
770
778static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
779 uint8_t bit);
780
788static inline atomic_bit_unsigned_t atomic_bit_unsigned(volatile unsigned *dest,
789 uint8_t bit)
790{
791 /* Some archs define uint32_t as unsigned long, uint16_t as short etc.,
792 * so we need to cast. */
793#if UINT_MAX == UINT16_MAX
794 return atomic_bit_u16((uint16_t volatile *)dest, bit);
795#elif UINT_MAX == UINT32_MAX
796 return atomic_bit_u32((uint32_t volatile *)dest, bit);
797#else
798 return atomic_bit_u64((uint64_t volatile *)dest, bit);
799#endif
800}
801
802
811static inline void atomic_set_bit_u8(atomic_bit_u8_t bit);
816static inline void atomic_set_bit_u16(atomic_bit_u16_t bit);
821static inline void atomic_set_bit_u32(atomic_bit_u32_t bit);
826static inline void atomic_set_bit_u64(atomic_bit_u64_t bit);
831static inline void atomic_set_bit_unsigned(atomic_bit_unsigned_t bit)
832{
833#if UINT_MAX == UINT16_MAX
834 atomic_set_bit_u16(bit);
835#elif UINT_MAX == UINT32_MAX
836 atomic_set_bit_u32(bit);
837#else
838 atomic_set_bit_u64(bit);
839#endif
840}
841
842
851static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit);
856static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit);
861static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit);
866static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit);
871static inline void atomic_clear_bit_unsigned(atomic_bit_unsigned_t bit)
872{
873#if UINT_MAX == UINT16_MAX
874 atomic_clear_bit_u16(bit);
875#elif UINT_MAX == UINT32_MAX
876 atomic_clear_bit_u32(bit);
877#else
878 atomic_clear_bit_u64(bit);
879#endif
880}
881
882
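/* Usage sketch (illustrative; `ready_flags`, `flag_ready` and `flag_unready`
 * are made-up names): the atomic_bit_*() helpers first create a reference to
 * a single bit and then set or clear exactly that bit, so platforms with
 * dedicated bit-manipulation hardware can implement this efficiently.
 *
 *     static volatile uint32_t ready_flags;
 *
 *     void flag_ready(uint8_t idx)
 *     {
 *         atomic_bit_u32_t bit = atomic_bit_u32(&ready_flags, idx);
 *         atomic_set_bit_u32(bit);
 *     }
 *
 *     void flag_unready(uint8_t idx)
 *     {
 *         atomic_clear_bit_u32(atomic_bit_u32(&ready_flags, idx));
 *     }
 */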
894static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
895 uint8_t summand);
903static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
904 uint16_t summand);
912static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
913 uint32_t summand);
921static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
922 uint64_t summand);
934static inline unsigned semi_atomic_fetch_add_unsigned(volatile unsigned *dest,
935 unsigned summand)
936{
937 if (sizeof(unsigned) == sizeof(uint64_t)) {
938 return semi_atomic_fetch_add_u64((volatile void *)dest, summand);
939 }
940
941 if (sizeof(unsigned) == sizeof(uint32_t)) {
942 return semi_atomic_fetch_add_u32((volatile void *)dest, summand);
943 }
944
945 return semi_atomic_fetch_add_u16((volatile void *)dest, summand);
946}
947
948
960static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
961 uint8_t subtrahend);
969static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
970 uint16_t subtrahend);
978static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
979 uint32_t subtrahend);
987static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
988 uint64_t subtrahend);
1000static inline unsigned semi_atomic_fetch_sub_unsigned(volatile unsigned *dest,
1001 unsigned subtrahend)
1002{
1003 if (sizeof(unsigned) == sizeof(uint64_t)) {
1004 return semi_atomic_fetch_sub_u64((volatile void *)dest, subtrahend);
1005 }
1006
1007 if (sizeof(unsigned) == sizeof(uint32_t)) {
1008 return semi_atomic_fetch_sub_u32((volatile void *)dest, subtrahend);
1009 }
1010
1011 return semi_atomic_fetch_sub_u16((volatile void *)dest, subtrahend);
1012}
1013
1014
1026static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest, uint8_t val);
1034static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1035 uint16_t val);
1043static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1044 uint32_t val);
1052static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1053 uint64_t val);
1065static inline unsigned semi_atomic_fetch_or_unsigned(volatile unsigned *dest,
1066 unsigned val)
1067{
1068 if (sizeof(unsigned) == sizeof(uint64_t)) {
1069 return semi_atomic_fetch_or_u64((volatile void *)dest, val);
1070 }
1071
1072 if (sizeof(unsigned) == sizeof(uint32_t)) {
1073 return semi_atomic_fetch_or_u32((volatile void *)dest, val);
1074 }
1075
1076 return semi_atomic_fetch_or_u16((volatile void *)dest, val);
1077}
1078
1079
1091static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1092 uint8_t val);
1100static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1101 uint16_t val);
1109static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1110 uint32_t val);
1118static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1119 uint64_t val);
1131static inline unsigned semi_atomic_fetch_xor_unsigned(volatile unsigned *dest,
1132 unsigned val)
1133{
1134 if (sizeof(unsigned) == sizeof(uint64_t)) {
1135 return semi_atomic_fetch_xor_u64((volatile void *)dest, val);
1136 }
1137
1138 if (sizeof(unsigned) == sizeof(uint32_t)) {
1139 return semi_atomic_fetch_xor_u32((volatile void *)dest, val);
1140 }
1141
1142 return semi_atomic_fetch_xor_u16((volatile void *)dest, val);
1143}
1144
1145
1157static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1158 uint8_t val);
1166static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1167 uint16_t val);
1175static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1176 uint32_t val);
1184static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1185 uint64_t val);
1197static inline unsigned semi_atomic_fetch_and_unsigned(volatile unsigned *dest,
1198 unsigned val)
1199{
1200 if (sizeof(unsigned) == sizeof(uint64_t)) {
1201 return semi_atomic_fetch_and_u64((volatile void *)dest, val);
1202 }
1203
1204 if (sizeof(unsigned) == sizeof(uint32_t)) {
1205 return semi_atomic_fetch_and_u32((volatile void *)dest, val);
1206 }
1207
1208 return semi_atomic_fetch_and_u16((volatile void *)dest, val);
1209}
1210
1211
1212/* Fallback implementations of atomic utility functions: */
1213
1221#define ATOMIC_LOAD_IMPL(name, type) \
1222 static inline type CONCAT(atomic_load_, name)(const volatile type *var) \
1223 { \
1224 unsigned state = irq_disable(); \
1225 type result = *var; \
1226 irq_restore(state); \
1227 return result; \
1228 }
1229
1230#ifndef HAS_ATOMIC_LOAD_U8
1231ATOMIC_LOAD_IMPL(u8, uint8_t)
1232#endif
1233#ifndef HAS_ATOMIC_LOAD_U16
1234ATOMIC_LOAD_IMPL(u16, uint16_t)
1235#endif
1236#ifndef HAS_ATOMIC_LOAD_U32
1237ATOMIC_LOAD_IMPL(u32, uint32_t)
1238#endif
1239#ifndef HAS_ATOMIC_LOAD_U64
1240ATOMIC_LOAD_IMPL(u64, uint64_t)
1241#endif
1242
1250#define ATOMIC_STORE_IMPL(name, type) \
1251 static inline void CONCAT(atomic_store_, name) \
1252 (volatile type *dest, type val) \
1253 { \
1254 unsigned state = irq_disable(); \
1255 *dest = val; \
1256 irq_restore(state); \
1257 }
1258
1259#ifndef HAS_ATOMIC_STORE_U8
1260ATOMIC_STORE_IMPL(u8, uint8_t)
1261#endif
1262#ifndef HAS_ATOMIC_STORE_U16
1263ATOMIC_STORE_IMPL(u16, uint16_t)
1264#endif
1265#ifndef HAS_ATOMIC_STORE_U32
1266ATOMIC_STORE_IMPL(u32, uint32_t)
1267#endif
1268#ifndef HAS_ATOMIC_STORE_U64
1269ATOMIC_STORE_IMPL(u64, uint64_t)
1270#endif
1271
1281#define ATOMIC_FETCH_OP_IMPL(opname, op, name, type) \
1282 static inline type CONCAT4(atomic_fetch_, opname, _, name) \
1283 (volatile type *dest, type val) \
1284 { \
1285 unsigned state = irq_disable(); \
1286 const type result = *dest; \
1287 *dest = result op val; \
1288 irq_restore(state); \
1289 return result; \
1290 }
1291
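/* For illustration: the instantiation ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
 * below expands to roughly
 *
 *     static inline uint8_t atomic_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
 *     {
 *         unsigned state = irq_disable();
 *         const uint8_t result = *dest;
 *         *dest = result + val;
 *         irq_restore(state);
 *         return result;
 *     }
 *
 * i.e. the fallback achieves atomicity by briefly disabling interrupts.
 */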
1292#ifndef HAS_ATOMIC_FETCH_ADD_U8
1293ATOMIC_FETCH_OP_IMPL(add, +, u8, uint8_t)
1294#endif
1295#ifndef HAS_ATOMIC_FETCH_ADD_U16
1296ATOMIC_FETCH_OP_IMPL(add, +, u16, uint16_t)
1297#endif
1298#ifndef HAS_ATOMIC_FETCH_ADD_U32
1299ATOMIC_FETCH_OP_IMPL(add, +, u32, uint32_t)
1300#endif
1301#ifndef HAS_ATOMIC_FETCH_ADD_U64
1302ATOMIC_FETCH_OP_IMPL(add, +, u64, uint64_t)
1303#endif
1304
1305#ifndef HAS_ATOMIC_FETCH_SUB_U8
1306ATOMIC_FETCH_OP_IMPL(sub, -, u8, uint8_t)
1307#endif
1308#ifndef HAS_ATOMIC_FETCH_SUB_U16
1309ATOMIC_FETCH_OP_IMPL(sub, -, u16, uint16_t)
1310#endif
1311#ifndef HAS_ATOMIC_FETCH_SUB_U32
1312ATOMIC_FETCH_OP_IMPL(sub, -, u32, uint32_t)
1313#endif
1314#ifndef HAS_ATOMIC_FETCH_SUB_U64
1315ATOMIC_FETCH_OP_IMPL(sub, -, u64, uint64_t)
1316#endif
1317
1318#ifndef HAS_ATOMIC_FETCH_OR_U8
1319ATOMIC_FETCH_OP_IMPL(or, |, u8, uint8_t)
1320#endif
1321#ifndef HAS_ATOMIC_FETCH_OR_U16
1322ATOMIC_FETCH_OP_IMPL(or, |, u16, uint16_t)
1323#endif
1324#ifndef HAS_ATOMIC_FETCH_OR_U32
1325ATOMIC_FETCH_OP_IMPL(or, |, u32, uint32_t)
1326#endif
1327#ifndef HAS_ATOMIC_FETCH_OR_U64
1328ATOMIC_FETCH_OP_IMPL(or, |, u64, uint64_t)
1329#endif
1330
1331#ifndef HAS_ATOMIC_FETCH_XOR_U8
1332ATOMIC_FETCH_OP_IMPL(xor, ^, u8, uint8_t)
1333#endif
1334#ifndef HAS_ATOMIC_FETCH_XOR_U16
1335ATOMIC_FETCH_OP_IMPL(xor, ^, u16, uint16_t)
1336#endif
1337#ifndef HAS_ATOMIC_FETCH_XOR_U32
1338ATOMIC_FETCH_OP_IMPL(xor, ^, u32, uint32_t)
1339#endif
1340#ifndef HAS_ATOMIC_FETCH_XOR_U64
1341ATOMIC_FETCH_OP_IMPL(xor, ^, u64, uint64_t)
1342#endif
1343
1344#ifndef HAS_ATOMIC_FETCH_AND_U8
1345ATOMIC_FETCH_OP_IMPL(and, &, u8, uint8_t)
1346#endif
1347#ifndef HAS_ATOMIC_FETCH_AND_U16
1348ATOMIC_FETCH_OP_IMPL(and, &, u16, uint16_t)
1349#endif
1350#ifndef HAS_ATOMIC_FETCH_AND_U32
1351ATOMIC_FETCH_OP_IMPL(and, &, u32, uint32_t)
1352#endif
1353#ifndef HAS_ATOMIC_FETCH_AND_U64
1354ATOMIC_FETCH_OP_IMPL(and, &, u64, uint64_t)
1355#endif
1356
1357#ifndef HAS_ATOMIC_BIT
1358static inline atomic_bit_u8_t atomic_bit_u8(volatile uint8_t *dest,
1359 uint8_t bit)
1360{
1361 atomic_bit_u8_t result = { .dest = dest, .mask = 1U << bit };
1362 return result;
1363}
1364static inline atomic_bit_u16_t atomic_bit_u16(volatile uint16_t *dest,
1365 uint8_t bit)
1366{
1367 atomic_bit_u16_t result = { .dest = dest, .mask = 1U << bit };
1368 return result;
1369}
1370static inline atomic_bit_u32_t atomic_bit_u32(volatile uint32_t *dest,
1371 uint8_t bit)
1372{
1373 atomic_bit_u32_t result = { .dest = dest, .mask = 1UL << bit };
1374 return result;
1375}
1376static inline atomic_bit_u64_t atomic_bit_u64(volatile uint64_t *dest,
1377 uint8_t bit)
1378{
1379 atomic_bit_u64_t result = { .dest = dest, .mask = 1ULL << bit };
1380 return result;
1381}
1382static inline void atomic_set_bit_u8(atomic_bit_u8_t bit)
1383{
1384 atomic_fetch_or_u8(bit.dest, bit.mask);
1385}
1386static inline void atomic_set_bit_u16(atomic_bit_u16_t bit)
1387{
1388 atomic_fetch_or_u16(bit.dest, bit.mask);
1389}
1390static inline void atomic_set_bit_u32(atomic_bit_u32_t bit)
1391{
1392 atomic_fetch_or_u32(bit.dest, bit.mask);
1393}
1394static inline void atomic_set_bit_u64(atomic_bit_u64_t bit)
1395{
1396 atomic_fetch_or_u64(bit.dest, bit.mask);
1397}
1398static inline void atomic_clear_bit_u8(atomic_bit_u8_t bit)
1399{
1400 atomic_fetch_and_u8(bit.dest, ~bit.mask);
1401}
1402static inline void atomic_clear_bit_u16(atomic_bit_u16_t bit)
1403{
1404 atomic_fetch_and_u16(bit.dest, ~bit.mask);
1405}
1406static inline void atomic_clear_bit_u32(atomic_bit_u32_t bit)
1407{
1408 atomic_fetch_and_u32(bit.dest, ~bit.mask);
1409}
1410static inline void atomic_clear_bit_u64(atomic_bit_u64_t bit)
1411{
1412 atomic_fetch_and_u64(bit.dest, ~bit.mask);
1413}
1414#endif
1415
1416/* Provide semi_atomic_*() functions on top.
1417 *
1418 * - If atomic_<FOO>() is provided: Use this for semi_atomic_<FOO>() as well
1419 * - Else:
1420 * - If matching `atomic_store_u<BITS>()` is provided: Only make final
1421 * store atomic, as we can avoid touching the IRQ state register that
1422 * way
1423 * - Else: We need to disable and re-enable IRQs anyway, so we just use the
1424 * fallback implementation of `atomic_<FOO>()` for `semi_atomic_<FOO>()`
1425 * as well
1426 */
1427
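/* Illustrative note: on a platform that provides an atomic 8-bit store but no
 * atomic 8-bit fetch-add, the selection above turns semi_atomic_fetch_add_u8()
 * into an atomic load followed by an atomic store. The read-modify-write as a
 * whole is then not atomic anymore, which is acceptable when only a single
 * context performs the updates while others merely read, e.g. (made-up names):
 *
 *     static volatile uint8_t tx_count;
 *
 *     void on_tx(void)             // only ever called from one thread
 *     {
 *         semi_atomic_fetch_add_u8(&tx_count, 1);
 *     }
 *
 *     uint8_t tx_count_get(void)   // may be called from anywhere
 *     {
 *         return atomic_load_u8(&tx_count);
 *     }
 */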
1428/* FETCH_ADD */
1429#if defined(HAS_ATOMIC_FETCH_ADD_U8) || !defined(HAS_ATOMIC_STORE_U8)
1430static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
1431 uint8_t val)
1432{
1433 return atomic_fetch_add_u8(dest, val);
1434}
1435#else
1436static inline uint8_t semi_atomic_fetch_add_u8(volatile uint8_t *dest,
1437 uint8_t val)
1438{
1439 uint8_t result = atomic_load_u8(dest);
1440 atomic_store_u8(dest, result + val);
1441 return result;
1442}
1443#endif /* HAS_ATOMIC_FETCH_ADD_U8 || !HAS_ATOMIC_STORE_U8 */
1444
1445#if defined(HAS_ATOMIC_FETCH_ADD_U16) || !defined(HAS_ATOMIC_STORE_U16)
1446static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
1447 uint16_t val)
1448{
1449 return atomic_fetch_add_u16(dest, val);
1450}
1451#else
1452static inline uint16_t semi_atomic_fetch_add_u16(volatile uint16_t *dest,
1453 uint16_t val)
1454{
1455 uint16_t result = atomic_load_u16(dest);
1456 atomic_store_u16(dest, result + val);
1457 return result;
1458}
1459#endif /* HAS_ATOMIC_FETCH_ADD_U16 || !HAS_ATOMIC_STORE_U16 */
1460
1461#if defined(HAS_ATOMIC_FETCH_ADD_U32) || !defined(HAS_ATOMIC_STORE_U32)
1462static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
1463 uint32_t val)
1464{
1465 return atomic_fetch_add_u32(dest, val);
1466}
1467#else
1468static inline uint32_t semi_atomic_fetch_add_u32(volatile uint32_t *dest,
1469 uint32_t val)
1470{
1471 uint32_t result = atomic_load_u32(dest);
1472 atomic_store_u32(dest, result + val);
1473 return result;
1474}
1475#endif /* HAS_ATOMIC_FETCH_ADD_U32 || !HAS_ATOMIC_STORE_U32 */
1476
1477#if defined(HAS_ATOMIC_FETCH_ADD_U64) || !defined(HAS_ATOMIC_STORE_U64)
1478static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
1479 uint64_t val)
1480{
1481 return atomic_fetch_add_u64(dest, val);
1482}
1483#else
1484static inline uint64_t semi_atomic_fetch_add_u64(volatile uint64_t *dest,
1485 uint64_t val)
1486{
1487 uint64_t result = atomic_load_u64(dest);
1488 atomic_store_u64(dest, result + val);
1489 return result;
1490}
1491#endif /* HAS_ATOMIC_FETCH_ADD_U64 || !HAS_ATOMIC_STORE_U64 */
1490
1491/* FETCH_SUB */
1492#if defined(HAS_ATOMIC_FETCH_SUB_U8) || !defined(HAS_ATOMIC_STORE_U8)
1493static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
1494 uint8_t val)
1495{
1496 return atomic_fetch_sub_u8(dest, val);
1497}
1498#else
1499static inline uint8_t semi_atomic_fetch_sub_u8(volatile uint8_t *dest,
1500 uint8_t val)
1501{
1502 uint8_t result = atomic_load_u8(dest);
1503 atomic_store_u8(dest, result - val);
1504 return result;
1505}
1506#endif /* HAS_ATOMIC_FETCH_SUB_U8 || !HAS_ATOMIC_STORE_U8 */
1507
1508#if defined(HAS_ATOMIC_FETCH_SUB_U16) || !defined(HAS_ATOMIC_STORE_U16)
1509static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
1510 uint16_t val)
1511{
1512 return atomic_fetch_sub_u16(dest, val);
1513}
1514#else
1515static inline uint16_t semi_atomic_fetch_sub_u16(volatile uint16_t *dest,
1516 uint16_t val)
1517{
1518 uint16_t result = atomic_load_u16(dest);
1519 atomic_store_u16(dest, result - val);
1520 return result;
1521}
1522#endif /* HAS_ATOMIC_FETCH_SUB_U16 || !HAS_ATOMIC_STORE_U16 */
1523
1524#if defined(HAS_ATOMIC_FETCH_SUB_U32) || !defined(HAS_ATOMIC_STORE_U32)
1525static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
1526 uint32_t val)
1527{
1528 return atomic_fetch_sub_u32(dest, val);
1529}
1530#else
1531static inline uint32_t semi_atomic_fetch_sub_u32(volatile uint32_t *dest,
1532 uint32_t val)
1533{
1534 uint32_t result = atomic_load_u32(dest);
1535 atomic_store_u32(dest, result - val);
1536 return result;
1537}
1538#endif /* HAS_ATOMIC_FETCH_SUB_U32 || !HAS_ATOMIC_STORE_U32 */
1539
1540#if defined(HAS_ATOMIC_FETCH_SUB_U64) || !defined(HAS_ATOMIC_STORE_U64)
1541static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
1542 uint64_t val)
1543{
1544 return atomic_fetch_sub_u64(dest, val);
1545}
1546#else
1547static inline uint64_t semi_atomic_fetch_sub_u64(volatile uint64_t *dest,
1548 uint64_t val)
1549{
1550 uint64_t result = atomic_load_u64(dest);
1551 atomic_store_u64(dest, result - val);
1552 return result;
1553}
1554#endif /* HAS_ATOMIC_FETCH_SUB_U64 || !HAS_ATOMIC_STORE_U64 */
1555
1556/* FETCH_OR */
1557#if defined(HAS_ATOMIC_FETCH_OR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1558static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
1559 uint8_t val)
1560{
1561 return atomic_fetch_or_u8(dest, val);
1562}
1563#else
1564static inline uint8_t semi_atomic_fetch_or_u8(volatile uint8_t *dest,
1565 uint8_t val)
1566{
1567 uint8_t result = atomic_load_u8(dest);
1568 atomic_store_u8(dest, result | val);
1569 return result;
1570}
1571#endif /* HAS_ATOMIC_FETCH_OR_U8 || !HAS_ATOMIC_STORE_U8 */
1572
1573#if defined(HAS_ATOMIC_FETCH_OR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1574static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1575 uint16_t val)
1576{
1577 return atomic_fetch_or_u16(dest, val);
1578}
1579#else
1580static inline uint16_t semi_atomic_fetch_or_u16(volatile uint16_t *dest,
1581 uint16_t val)
1582{
1583 uint16_t result = atomic_load_u16(dest);
1584 atomic_store_u16(dest, result | val);
1585 return result;
1586}
1587#endif /* HAS_ATOMIC_FETCH_OR_U16 || !HAS_ATOMIC_STORE_U16 */
1588
1589#if defined(HAS_ATOMIC_FETCH_OR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1590static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1591 uint32_t val)
1592{
1593 return atomic_fetch_or_u32(dest, val);
1594}
1595#else
1596static inline uint32_t semi_atomic_fetch_or_u32(volatile uint32_t *dest,
1597 uint32_t val)
1598{
1599 uint32_t result = atomic_load_u32(dest);
1600 atomic_store_u32(dest, result | val);
1601 return result;
1602}
1603#endif /* HAS_ATOMIC_FETCH_OR_U32 || !HAS_ATOMIC_STORE_U32 */
1604
1605#if defined(HAS_ATOMIC_FETCH_OR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1606static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1607 uint64_t val)
1608{
1609 return atomic_fetch_or_u64(dest, val);
1610}
1611#else
1612static inline uint64_t semi_atomic_fetch_or_u64(volatile uint64_t *dest,
1613 uint64_t val)
1614{
1615 uint64_t result = atomic_load_u64(dest);
1616 atomic_store_u64(dest, result | val);
1617 return result;
1618}
1619#endif /* HAS_ATOMIC_FETCH_OR_U64 || !HAS_ATOMIC_STORE_U64 */
1620
1621/* FETCH_XOR */
1622#if defined(HAS_ATOMIC_FETCH_XOR_U8) || !defined(HAS_ATOMIC_STORE_U8)
1623static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1624 uint8_t val)
1625{
1626 return atomic_fetch_xor_u8(dest, val);
1627}
1628#else
1629static inline uint8_t semi_atomic_fetch_xor_u8(volatile uint8_t *dest,
1630 uint8_t val)
1631{
1632 uint8_t result = atomic_load_u8(dest);
1633 atomic_store_u8(dest, result ^ val);
1634 return result;
1635}
1636#endif /* HAS_ATOMIC_FETCH_XOR_U8 || !HAS_ATOMIC_STORE_U8 */
1637
1638#if defined(HAS_ATOMIC_FETCH_XOR_U16) || !defined(HAS_ATOMIC_STORE_U16)
1639static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1640 uint16_t val)
1641{
1642 return atomic_fetch_xor_u16(dest, val);
1643}
1644#else
1645static inline uint16_t semi_atomic_fetch_xor_u16(volatile uint16_t *dest,
1646 uint16_t val)
1647{
1648 uint16_t result = atomic_load_u16(dest);
1649 atomic_store_u16(dest, result ^ val);
1650 return result;
1651}
1652#endif /* HAS_ATOMIC_FETCH_XOR_U16 || !HAS_ATOMIC_STORE_U16 */
1653
1654#if defined(HAS_ATOMIC_FETCH_XOR_U32) || !defined(HAS_ATOMIC_STORE_U32)
1655static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1656 uint32_t val)
1657{
1658 return atomic_fetch_xor_u32(dest, val);
1659}
1660#else
1661static inline uint32_t semi_atomic_fetch_xor_u32(volatile uint32_t *dest,
1662 uint32_t val)
1663{
1664 uint32_t result = atomic_load_u32(dest);
1665 atomic_store_u32(dest, result ^ val);
1666 return result;
1667}
1668#endif /* HAS_ATOMIC_FETCH_XOR_U32 || !HAS_ATOMIC_STORE_U32 */
1669
1670#if defined(HAS_ATOMIC_FETCH_XOR_U64) || !defined(HAS_ATOMIC_STORE_U64)
1671static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1672 uint64_t val)
1673{
1674 return atomic_fetch_xor_u64(dest, val);
1675}
1676#else
1677static inline uint64_t semi_atomic_fetch_xor_u64(volatile uint64_t *dest,
1678 uint64_t val)
1679{
1680 uint64_t result = atomic_load_u64(dest);
1681 atomic_store_u64(dest, result ^ val);
1682 return result;
1683}
1684#endif /* HAS_ATOMIC_FETCH_XOR_U64 || !HAS_ATOMIC_STORE_U64 */
1685
1686/* FETCH_AND */
1687#if defined(HAS_ATOMIC_FETCH_AND_U8) || !defined(HAS_ATOMIC_STORE_U8)
1688static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1689 uint8_t val)
1690{
1691 return atomic_fetch_and_u8(dest, val);
1692}
1693#else
1694static inline uint8_t semi_atomic_fetch_and_u8(volatile uint8_t *dest,
1695 uint8_t val)
1696{
1697 uint8_t result = atomic_load_u8(dest);
1698 atomic_store_u8(dest, result & val);
1699 return result;
1700}
1701#endif /* HAS_ATOMIC_FETCH_AND_U8 || !HAS_ATOMIC_STORE_U8 */
1702
1703#if defined(HAS_ATOMIC_FETCH_AND_U16) || !defined(HAS_ATOMIC_STORE_U16)
1704static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1705 uint16_t val)
1706{
1707 return atomic_fetch_and_u16(dest, val);
1708}
1709#else
1710static inline uint16_t semi_atomic_fetch_and_u16(volatile uint16_t *dest,
1711 uint16_t val)
1712{
1713 uint16_t result = atomic_load_u16(dest);
1714 atomic_store_u16(dest, result & val);
1715 return result;
1716}
1717#endif /* HAS_ATOMIC_FETCH_AND_U16 || !HAS_ATOMIC_STORE_U16 */
1718
1719#if defined(HAS_ATOMIC_FETCH_AND_U32) || !defined(HAS_ATOMIC_STORE_U32)
1720static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1721 uint32_t val)
1722{
1723 return atomic_fetch_and_u32(dest, val);
1724}
1725#else
1726static inline uint32_t semi_atomic_fetch_and_u32(volatile uint32_t *dest,
1727 uint32_t val)
1728{
1729 uint32_t result = atomic_load_u32(dest);
1730 atomic_store_u32(dest, result & val);
1731 return result;
1732}
1733#endif /* HAS_ATOMIC_FETCH_AND_U32 || !HAS_ATOMIC_STORE_U32 */
1734
1735#if defined(HAS_ATOMIC_FETCH_AND_U64) || !defined(HAS_ATOMIC_STORE_U64)
1736static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1737 uint64_t val)
1738{
1739 return atomic_fetch_and_u64(dest, val);
1740}
1741#else
1742static inline uint64_t semi_atomic_fetch_and_u64(volatile uint64_t *dest,
1743 uint64_t val)
1744{
1745 uint64_t result = atomic_load_u64(dest);
1746 atomic_store_u64(dest, result & val);
1747 return result;
1748}
1749#endif /* HAS_ATOMIC_FETCH_AND_U64 || !HAS_ATOMIC_STORE_U64 */
1750
1751#ifdef __cplusplus
1752}
1753#endif
1754
1755/* NOLINTEND(bugprone-macro-parentheses, readability-inconsistent-declaration-parameter-name) */