/*
 * Copyright (c) 2015 Intel Corporation
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include <zephyr/ztest.h>
#include <zephyr/sys/atomic.h>

/* convenience macro - picks the 64-bit or 32-bit value based on the pointer width */
#define ATOMIC_WORD(val_if_64, val_if_32)                                                          \
	((atomic_t)((sizeof(void *) == sizeof(uint64_t)) ? (val_if_64) : (val_if_32)))
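
/*
 * ATOMIC_WORD() is used below wherever an expected value depends on the
 * native atomic word width, e.g. the atomic_nand() result and the bit
 * patterns fed to the per-bit loops.
 */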

/* number of bits in the example atomic bit array */
#define NUM_FLAG_BITS 100

/* test cycle: 20 iterations x 1000us busy-wait = 20ms per thread */
#define TEST_CYCLE 20

#define THREADS_NUM 2

#define STACK_SIZE (512 + CONFIG_TEST_EXTRA_STACK_SIZE)

static K_THREAD_STACK_ARRAY_DEFINE(stack, THREADS_NUM, STACK_SIZE);

static struct k_thread thread[THREADS_NUM];

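/*
 * Counter shared by the worker threads in test_threads_access_atomic;
 * each thread increments it TEST_CYCLE times, so the expected final
 * value is TEST_CYCLE * THREADS_NUM.
 */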
atomic_t total_atomic;

/**
 * @defgroup kernel_atomic_ops_tests Atomic Operations
 * @ingroup all_tests
 * @{
 * @}
 *
 * @addtogroup kernel_atomic_ops_tests
 * @{
 */

/**
 * @brief Verify atomic operation functionality
 * @details
 * Test Objective:
 * - Verify that the atomic operation APIs behave correctly.
 *
 * Test techniques:
 * - Dynamic analysis and testing
 * - Functional and black box testing
 * - Interface testing
 *
 * Prerequisite Conditions:
 * - N/A
 *
 * Input Specifications:
 * - N/A
 *
 * Test Procedure:
 * -# Call each of the following atomic operation APIs in turn and check
 * the returned value and the resulting value of the target operand.
 * - atomic_cas()
 * - atomic_ptr_cas()
 * - atomic_add()
 * - atomic_sub()
 * - atomic_inc()
 * - atomic_dec()
 * - atomic_get()
 * - atomic_ptr_get()
 * - atomic_set()
 * - atomic_ptr_set()
 * - atomic_clear()
 * - atomic_ptr_clear()
 * - atomic_or()
 * - atomic_xor()
 * - atomic_and()
 * - atomic_nand()
 * - atomic_test_bit()
 * - atomic_test_and_clear_bit()
 * - atomic_test_and_set_bit()
 * - atomic_clear_bit()
 * - atomic_set_bit()
 * - atomic_set_bit_to()
 * - ATOMIC_DEFINE
 *
 * Expected Test Result:
 * - Each API returns the expected value and leaves the target operand in the expected state.
 *
 * Pass/Fail Criteria:
 * - The test passes if all check points in the test procedure pass; otherwise it fails.
 *
 * Assumptions and Constraints:
 * - N/A
 *
 * @see atomic_cas(), atomic_add(), atomic_sub(),
 * atomic_inc(), atomic_dec(), atomic_get(), atomic_set(),
 * atomic_clear(), atomic_or(), atomic_and(), atomic_xor(),
 * atomic_nand(), atomic_test_bit(), atomic_test_and_clear_bit(),
 * atomic_test_and_set_bit(), atomic_clear_bit(), atomic_set_bit(),
 * ATOMIC_DEFINE
 *
 */
ZTEST_USER(atomic, test_atomic)
{
	int i;

	atomic_t target, orig;
	atomic_ptr_t ptr_target;
	atomic_val_t value;
	atomic_val_t oldvalue;
	void *ptr_value, *old_ptr_value;

	ATOMIC_DEFINE(flag_bits, NUM_FLAG_BITS) = {0};
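	/*
	 * ATOMIC_DEFINE() sizes the flag_bits array to hold NUM_FLAG_BITS
	 * bits; with 100 bits it spans several atomic_t words, which the
	 * per-bit helpers handle transparently.
	 */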

	zassert_equal(sizeof(atomic_t), ATOMIC_WORD(sizeof(uint64_t), sizeof(uint32_t)),
		      "sizeof(atomic_t)");

	target = 4;
	value = 5;
	oldvalue = 6;

	/* atomic_cas() */
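	/*
	 * atomic_cas() swaps in the new value only when the current value
	 * equals oldvalue: it returns false for the mismatch below and true
	 * once target has been set to 6.
	 */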
	zassert_false(atomic_cas(&target, oldvalue, value), "atomic_cas");
	target = 6;
	zassert_true(atomic_cas(&target, oldvalue, value), "atomic_cas");
	zassert_true((target == value), "atomic_cas");

	/* atomic_ptr_cas() */
	ptr_target = ATOMIC_PTR_INIT((void *)4);
	ptr_value = (atomic_ptr_val_t)5;
	old_ptr_value = (atomic_ptr_val_t)6;
	zassert_false(atomic_ptr_cas(&ptr_target, old_ptr_value, ptr_value),
		      "atomic_ptr_cas");
	ptr_target = (atomic_ptr_val_t)6;
	zassert_true(atomic_ptr_cas(&ptr_target, old_ptr_value, ptr_value),
		     "atomic_ptr_cas");
	zassert_true((ptr_target == ptr_value), "atomic_ptr_cas");

	/* atomic_add() */
	target = 1;
	value = 2;
	zassert_true((atomic_add(&target, value) == 1), "atomic_add");
	zassert_true((target == 3), "atomic_add");
	/* Verify that atomic_add() accepts a negative operand */
	target = 2;
	value = -4;
	zassert_true((atomic_add(&target, value) == 2), "atomic_add");
	zassert_true((target == -2), "atomic_add");

	/* atomic_sub() */
	target = 10;
	value = 2;
	zassert_true((atomic_sub(&target, value) == 10), "atomic_sub");
	zassert_true((target == 8), "atomic_sub");
	/* Verify that atomic_sub() accepts a negative operand */
	target = 5;
	value = -4;
	zassert_true((atomic_sub(&target, value) == 5), "atomic_sub");
	zassert_true((target == 9), "atomic_sub");

	/* atomic_inc() */
	target = 5;
	zassert_true((atomic_inc(&target) == 5), "atomic_inc");
	zassert_true((target == 6), "atomic_inc");

	/* atomic_dec() */
	target = 2;
	zassert_true((atomic_dec(&target) == 2), "atomic_dec");
	zassert_true((target == 1), "atomic_dec");

	/* atomic_get() */
	target = 50;
	zassert_true((atomic_get(&target) == 50), "atomic_get");

	/* atomic_ptr_get() */
	ptr_target = ATOMIC_PTR_INIT((void *)50);
	zassert_true((atomic_ptr_get(&ptr_target) == (atomic_ptr_val_t)50),
		     "atomic_ptr_get");

	/* atomic_set() */
	target = 42;
	value = 77;
	zassert_true((atomic_set(&target, value) == 42), "atomic_set");
	zassert_true((target == value), "atomic_set");

	/* atomic_ptr_set() */
	ptr_target = ATOMIC_PTR_INIT((void *)42);
	ptr_value = (atomic_ptr_val_t)77;
	zassert_true((atomic_ptr_set(&ptr_target, ptr_value) == (atomic_ptr_val_t)42),
		     "atomic_ptr_set");
	zassert_true((ptr_target == ptr_value), "atomic_ptr_set");

	/* atomic_clear() */
	target = 100;
	zassert_true((atomic_clear(&target) == 100), "atomic_clear");
	zassert_true((target == 0), "atomic_clear");

	/* atomic_ptr_clear() */
	ptr_target = ATOMIC_PTR_INIT((void *)100);
	zassert_true((atomic_ptr_clear(&ptr_target) == (atomic_ptr_val_t)100),
		     "atomic_ptr_clear");
	zassert_true((ptr_target == NULL), "atomic_ptr_clear");

	/* atomic_or() */
	target = 0xFF00;
	value  = 0x0F0F;
	zassert_true((atomic_or(&target, value) == 0xFF00), "atomic_or");
	zassert_true((target == 0xFF0F), "atomic_or");

	/* atomic_xor() */
	target = 0xFF00;
	value  = 0x0F0F;
	zassert_true((atomic_xor(&target, value) == 0xFF00), "atomic_xor");
	zassert_true((target == 0xF00F), "atomic_xor");

	/* atomic_and() */
	target = 0xFF00;
	value  = 0x0F0F;
	zassert_true((atomic_and(&target, value) == 0xFF00), "atomic_and");
	zassert_true((target == 0x0F00), "atomic_and");

	/* atomic_nand() */
	target = 0xFF00;
	value  = 0x0F0F;
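	/*
	 * Expected result: ~(0xFF00 & 0x0F0F) = ~0x0F00, i.e. bitwise NOT
	 * across the full word: 0xFFFFF0FF on 32-bit atomics and
	 * 0xFFFFFFFFFFFFF0FF on 64-bit atomics.
	 */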
	zassert_true((atomic_nand(&target, value) == 0xFF00), "atomic_nand");
	zassert_true((target == ATOMIC_WORD(0xFFFFFFFFFFFFF0FF, 0xFFFFF0FF)), "atomic_nand");

	/* atomic_test_bit() */
	for (i = 0; i < ATOMIC_BITS; i++) {
		target = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		zassert_true(!!(atomic_test_bit(&target, i) == !!(target & BIT(i))),
			     "atomic_test_bit");
	}

	/* atomic_test_and_clear_bit() */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		zassert_true(!!(atomic_test_and_clear_bit(&target, i)) == !!(orig & BIT(i)),
			     "atomic_test_and_clear_bit");
		zassert_true(target == (orig & ~BIT(i)), "atomic_test_and_clear_bit");
	}

	/* atomic_test_and_set_bit() */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		zassert_true(!!(atomic_test_and_set_bit(&target, i)) == !!(orig & BIT(i)),
			     "atomic_test_and_set_bit");
		zassert_true(target == (orig | BIT(i)), "atomic_test_and_set_bit");
	}

	/* atomic_clear_bit() */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		atomic_clear_bit(&target, i);
		zassert_true(target == (orig & ~BIT(i)), "atomic_clear_bit");
	}

	/* atomic_set_bit() */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		atomic_set_bit(&target, i);
		zassert_true(target == (orig | BIT(i)), "atomic_set_bit");
	}

	/* atomic_set_bit_to(&target, i, false) */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		atomic_set_bit_to(&target, i, false);
		zassert_true(target == (orig & ~BIT(i)), "atomic_set_bit_to");
	}

	/* atomic_set_bit_to(&target, i, true) */
	for (i = 0; i < ATOMIC_BITS; i++) {
		orig = ATOMIC_WORD(0x0F0F0F0F0F0F0F0F, 0x0F0F0F0F);
		target = orig;
		atomic_set_bit_to(&target, i, true);
		zassert_true(target == (orig | BIT(i)), "atomic_set_bit_to");
	}

	/* ATOMIC_DEFINE */
	for (i = 0; i < NUM_FLAG_BITS; i++) {
		atomic_set_bit(flag_bits, i);
		zassert_true(!!atomic_test_bit(flag_bits, i) == !!(1),
			"Failed to set a single bit in an array of atomic variables");
		atomic_clear_bit(flag_bits, i);
		zassert_true(!!atomic_test_bit(flag_bits, i) == !!(0),
			"Failed to clear a single bit in an array of atomic variables");
	}
}

/* This helper function runs for more than one time slice */
void atomic_handler(void *p1, void *p2, void *p3)
{
	ARG_UNUSED(p1);
	ARG_UNUSED(p2);
	ARG_UNUSED(p3);

	for (int i = 0; i < TEST_CYCLE; i++) {
		atomic_inc(&total_atomic);
		/* Busy-wait 1000us to lengthen the handler's execution time */
		k_busy_wait(1000);
	}
}

/**
 * @brief Verify atomic operation with threads
 *
 * @details Create two preemptible threads of equal priority that
 * atomically access the same atomic variable. Because the threads
 * have equal priority, time slicing is enabled so that both of them
 * get scheduled. While the threads run, they are scheduled
 * alternately according to the time slice, so their increments of
 * the shared counter interleave.
 *
 */
ZTEST(atomic, test_threads_access_atomic)
{
	k_tid_t tid[THREADS_NUM];

	/* enable time slice 1ms at priority 10 */
	k_sched_time_slice_set(1, K_PRIO_PREEMPT(10));

	for (int i = 0; i < THREADS_NUM; i++) {
		tid[i] = k_thread_create(&thread[i], stack[i], STACK_SIZE,
				atomic_handler, NULL, NULL, NULL,
				K_PRIO_PREEMPT(10), 0, K_NO_WAIT);
	}

	for (int i = 0; i < THREADS_NUM; i++) {
		k_thread_join(tid[i], K_FOREVER);
	}

	/* disable time slice */
	k_sched_time_slice_set(0, K_PRIO_PREEMPT(10));

	zassert_true(total_atomic == (TEST_CYCLE * THREADS_NUM),
		"atomic counting failure");
}

/**
 * @brief Check that an atomic_t holds the same value on overflow
 *		whether it is incremented atomically or non-atomically
 *
 * @details According to the C standard, the behavior of a signed variable
 *	is undefined on overflow. This test checks that an atomic_t holds the
 *	same value on overflow whether it is incremented atomically or
 *	non-atomically. This allows an atomic variable to be incremented in a
 *	non-atomic manner (as long as it is logically safe) with its value
 *	still matching the result of the corresponding atomic increment.
 *
 */
ZTEST(atomic, test_atomic_overflow)
{
	/* Check overflow over max signed value */
	uint64_t overflowed_value = (uint64_t)1 << (ATOMIC_BITS - 1);
	atomic_val_t atomic_value = overflowed_value - 1;
	atomic_t atomic_var = ATOMIC_INIT(atomic_value);
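	/*
	 * Example with 32-bit atomics: atomic_value starts at 0x7FFFFFFF
	 * (INT32_MAX); both increments below are expected to wrap it to
	 * 0x80000000, which equals (atomic_val_t)overflowed_value.
	 */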

	atomic_value++;
	atomic_inc(&atomic_var);

	zassert_true(atomic_value == atomic_get(&atomic_var),
		"max signed overflow mismatch: %lx/%lx",
		atomic_value, atomic_get(&atomic_var));
	zassert_true(atomic_value == (atomic_val_t)overflowed_value,
		"unexpected value after overflow: %lx, expected: %lx",
		atomic_value, (atomic_val_t)overflowed_value);

	/* Check overflow over max unsigned value */
	atomic_value = -1;
	atomic_var = ATOMIC_INIT(atomic_value);

	atomic_value++;
	atomic_inc(&atomic_var);

	zassert_true(atomic_value == atomic_get(&atomic_var),
		"max unsigned overflow mismatch: %lx/%lx",
		atomic_value, atomic_get(&atomic_var));
	zassert_true(atomic_value == 0,
		"unexpected value after overflow: %lx, expected: 0",
		atomic_value);
}

/**
 * @}
 */
extern void *common_setup(void);
ZTEST_SUITE(atomic, NULL, common_setup, NULL, NULL, NULL);