volatile_utils.h
/*
 * Copyright (C) 2020 Otto-von-Guericke-Universität Magdeburg
 *
 * This file is subject to the terms and conditions of the GNU Lesser General
 * Public License v2.1. See the file LICENSE in the top level directory for more
 * details.
 */

#pragma once

#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/** @brief Load an 8 bit value completely unoptimized. */
static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
{
    return *var;
}

/** @brief Load a 16 bit value completely unoptimized. */
static inline uint16_t volatile_load_u16(const volatile uint16_t *var)
{
    return *var;
}

/** @brief Load a 32 bit value completely unoptimized. */
static inline uint32_t volatile_load_u32(const volatile uint32_t *var)
{
    return *var;
}

/** @brief Load a 64 bit value completely unoptimized. */
static inline uint64_t volatile_load_u64(const volatile uint64_t *var)
{
    return *var;
}

/** @brief Store an 8 bit value completely unoptimized. */
static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
{
    *dest = val;
}

/** @brief Store a 16 bit value completely unoptimized. */
static inline void volatile_store_u16(volatile uint16_t *dest, uint16_t val)
{
    *dest = val;
}

/** @brief Store a 32 bit value completely unoptimized. */
static inline void volatile_store_u32(volatile uint32_t *dest, uint32_t val)
{
    *dest = val;
}

/** @brief Store a 64 bit value completely unoptimized. */
static inline void volatile_store_u64(volatile uint64_t *dest, uint64_t val)
{
    *dest = val;
}

/** @brief Unoptimized version of *dest += val */
static inline uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t result = *dest;
    *dest = result + val;
    return result;
}

/** @brief Unoptimized version of *dest -= val */
static inline uint8_t volatile_fetch_sub_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t result = *dest;
    *dest = result - val;
    return result;
}

/** @brief Unoptimized version of *dest |= val */
static inline uint8_t volatile_fetch_or_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t result = *dest;
    *dest = result | val;
    return result;
}

/** @brief Unoptimized version of *dest ^= val */
static inline uint8_t volatile_fetch_xor_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t result = *dest;
    *dest = result ^ val;
    return result;
}

/** @brief Unoptimized version of *dest &= val */
static inline uint8_t volatile_fetch_and_u8(volatile uint8_t *dest, uint8_t val)
{
    uint8_t result = *dest;
    *dest = result & val;
    return result;
}

/** @brief Unoptimized version of *dest += val */
static inline uint16_t volatile_fetch_add_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t result = *dest;
    *dest = result + val;
    return result;
}

/** @brief Unoptimized version of *dest -= val */
static inline uint16_t volatile_fetch_sub_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t result = *dest;
    *dest = result - val;
    return result;
}

/** @brief Unoptimized version of *dest |= val */
static inline uint16_t volatile_fetch_or_u16(volatile uint16_t *dest,
                                             uint16_t val)
{
    uint16_t result = *dest;
    *dest = result | val;
    return result;
}

/** @brief Unoptimized version of *dest ^= val */
static inline uint16_t volatile_fetch_xor_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t result = *dest;
    *dest = result ^ val;
    return result;
}

/** @brief Unoptimized version of *dest &= val */
static inline uint16_t volatile_fetch_and_u16(volatile uint16_t *dest,
                                              uint16_t val)
{
    uint16_t result = *dest;
    *dest = result & val;
    return result;
}

/** @brief Unoptimized version of *dest += val */
static inline uint32_t volatile_fetch_add_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t result = *dest;
    *dest = result + val;
    return result;
}

/** @brief Unoptimized version of *dest -= val */
static inline uint32_t volatile_fetch_sub_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t result = *dest;
    *dest = result - val;
    return result;
}

/** @brief Unoptimized version of *dest |= val */
static inline uint32_t volatile_fetch_or_u32(volatile uint32_t *dest,
                                             uint32_t val)
{
    uint32_t result = *dest;
    *dest = result | val;
    return result;
}

/** @brief Unoptimized version of *dest ^= val */
static inline uint32_t volatile_fetch_xor_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t result = *dest;
    *dest = result ^ val;
    return result;
}

/** @brief Unoptimized version of *dest &= val */
static inline uint32_t volatile_fetch_and_u32(volatile uint32_t *dest,
                                              uint32_t val)
{
    uint32_t result = *dest;
    *dest = result & val;
    return result;
}

/** @brief Unoptimized version of *dest += val */
static inline uint64_t volatile_fetch_add_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t result = *dest;
    *dest = result + val;
    return result;
}

/** @brief Unoptimized version of *dest -= val */
static inline uint64_t volatile_fetch_sub_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t result = *dest;
    *dest = result - val;
    return result;
}

/** @brief Unoptimized version of *dest |= val */
static inline uint64_t volatile_fetch_or_u64(volatile uint64_t *dest,
                                             uint64_t val)
{
    uint64_t result = *dest;
    *dest = result | val;
    return result;
}

/** @brief Unoptimized version of *dest ^= val */
static inline uint64_t volatile_fetch_xor_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t result = *dest;
    *dest = result ^ val;
    return result;
}

/** @brief Unoptimized version of *dest &= val */
static inline uint64_t volatile_fetch_and_u64(volatile uint64_t *dest,
                                              uint64_t val)
{
    uint64_t result = *dest;
    *dest = result & val;
    return result;
}

#ifdef __cplusplus
}
#endif
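
As a usage illustration (not part of the header above), the following sketch shows how the load/store helpers might be used for a flag shared between an interrupt handler and the main loop. The names event_pending, some_isr, and main_loop_step are hypothetical; only the volatile_load_u8/volatile_store_u8 calls come from this file, and they only force the memory accesses to happen, they do not provide atomicity.

static volatile uint8_t event_pending;    /* hypothetical flag written from an ISR */

void some_isr(void)
{
    /* volatile store: the compiler must emit this write */
    volatile_store_u8(&event_pending, 1);
}

void main_loop_step(void)
{
    /* volatile load: the flag is re-read on every call rather than
     * cached in a register */
    if (volatile_load_u8(&event_pending)) {
        /* note: an event arriving between the load above and this store
         * would be lost; these helpers force memory accesses but give
         * no atomicity guarantee */
        volatile_store_u8(&event_pending, 0);
        /* ... handle the event ... */
    }
}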
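
In the same spirit, a sketch of the fetch-and-modify helpers: each performs a volatile load followed by a volatile store and returns the previously observed value, so unlike a true atomic fetch_add nothing prevents an interrupt or another core from intervening between the two accesses. The counter name rx_count and the wrapper function below are made up for the example.

static volatile uint32_t rx_count;    /* hypothetical event counter */

uint32_t count_rx_frame(void)
{
    /* volatile read-modify-write: returns the value observed before the
     * increment; an interrupt between the internal load and store could
     * still make the update race */
    return volatile_fetch_add_u32(&rx_count, 1);
}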