@@ -5,6 +5,7 @@
 /* System Headers */
 #include <pthread.h>
 #include <sys/types.h>
+#include <stdatomic.h>
 #include <stdio.h>
 #include <stdlib.h>
 
@@ -42,14 +43,14 @@ int num_spins_before_condwait;
 
 /* Data Structures */
 struct _qt_threadqueue_node {
-    struct _qt_threadqueue_node *next;
+    struct _qt_threadqueue_node *_Atomic next;
     qthread_t *thread;
 };
 
 typedef struct {
     /* The First Cacheline */
-    void *head;
-    void *tail;
+    void *_Atomic head;
+    void *_Atomic tail;
     uint8_t pad1[CACHELINE_WIDTH - (2 * sizeof(void *))];
     /* The Second Cacheline */
     void *shadow_head;
@@ -110,7 +111,9 @@ qt_threadqueue_t INTERNAL *qt_threadqueue_new(void)
 
     qassert_ret(q != NULL, NULL);
 
-    q->q.shadow_head = q->q.head = q->q.tail = NULL;
+    atomic_init(&q->q.head, NULL);
+    atomic_init(&q->q.tail, NULL);
+    q->q.shadow_head = NULL;
     q->advisory_queuelen = 0;
     q->q.nemesis_advisory_queuelen = 0; // redundant
 #ifdef QTHREAD_CONDWAIT_BLOCKING_QUEUE
@@ -124,27 +127,31 @@ qt_threadqueue_t INTERNAL *qt_threadqueue_new(void)
 static inline qt_threadqueue_node_t *qt_internal_NEMESIS_dequeue(NEMESIS_queue *q)
 {   /*{{{ */
     if (!q->shadow_head) {
-        if (!q->head) {
+        void *head = atomic_load_explicit(&q->head, memory_order_relaxed);
+        if (!head) {
             return NULL;
         }
-        q->shadow_head = q->head;
-        q->head = NULL;
+        q->shadow_head = head;
+
+        atomic_store_explicit(&q->head, NULL, memory_order_relaxed);
     }
 
     qt_threadqueue_node_t *const retval = (void *volatile)(q->shadow_head);
 
     if ((retval != NULL) && (retval != (void *)1)) {
-        if (retval->next != NULL) {
-            q->shadow_head = retval->next;
-            retval->next = NULL;
+        struct _qt_threadqueue_node *next_loc = atomic_load_explicit(&retval->next, memory_order_acquire);
+        if (next_loc != NULL) {
+            q->shadow_head = next_loc;
+            atomic_store_explicit(&retval->next, NULL, memory_order_relaxed);
         } else {
             qt_threadqueue_node_t *old;
             q->shadow_head = NULL;
-            old = qthread_cas_ptr(&(q->tail), retval, NULL);
+            old = qthread_cas_ptr((void **)&(q->tail), retval, NULL);
             if (old != retval) {
-                while (retval->next == NULL) SPINLOCK_BODY();
-                q->shadow_head = retval->next;
-                retval->next = NULL;
+                void *retval_next_tmp;
+                while ((retval_next_tmp = atomic_load_explicit(&retval->next, memory_order_relaxed)) == NULL) SPINLOCK_BODY();
+                q->shadow_head = retval_next_tmp;
+                atomic_store_explicit(&retval->next, NULL, memory_order_relaxed);
            }
        }
    }
@@ -154,27 +161,29 @@ static inline qt_threadqueue_node_t *qt_internal_NEMESIS_dequeue(NEMESIS_queue *
 static inline qt_threadqueue_node_t *qt_internal_NEMESIS_dequeue_st(NEMESIS_queue *q)
 {   /*{{{ */
     if (!q->shadow_head) {
-        if (!q->head) {
+        void *head = atomic_load_explicit(&q->head, memory_order_relaxed);
+        if (!head) {
             return NULL;
         }
-        q->shadow_head = q->head;
-        q->head = NULL;
+        q->shadow_head = head;
+        atomic_store_explicit(&q->head, NULL, memory_order_relaxed);
     }
 
     qt_threadqueue_node_t *const retval = (void *volatile)(q->shadow_head);
 
     if ((retval != NULL) && (retval != (void *)1)) {
-        if (retval->next != NULL) {
-            q->shadow_head = retval->next;
-            retval->next = NULL;
+        void *retval_next_tmp = atomic_load_explicit(&retval->next, memory_order_relaxed);
+        if (retval_next_tmp != NULL) {
+            q->shadow_head = retval_next_tmp;
+            atomic_store_explicit(&retval->next, NULL, memory_order_relaxed);
         } else {
             q->shadow_head = NULL;
-            if (q->tail == retval) {
-                q->tail = NULL;
+            if (atomic_load_explicit(&q->tail, memory_order_relaxed) == retval) {
+                atomic_store_explicit(&q->tail, NULL, memory_order_relaxed);
             }
         }
     }
-    qthread_debug(THREADQUEUE_DETAILS, "nemesis q:%p head:%p tail:%p shadow_head:%p\n", q, q->head, q->tail, q->shadow_head);
+    qthread_debug(THREADQUEUE_DETAILS, "nemesis q:%p head:%p tail:%p shadow_head:%p\n", q, atomic_load_explicit(&q->head, memory_order_relaxed), atomic_load_explicit(&q->tail, memory_order_relaxed), q->shadow_head);
     return retval;
 } /*}}} */
 
@@ -185,7 +194,7 @@ void INTERNAL qt_threadqueue_free(qt_threadqueue_t *q)
     qt_threadqueue_node_t *node = qt_internal_NEMESIS_dequeue_st(&q->q);
     if (node) {
         qthread_t *retval = node->thread;
-        assert(node->next == NULL);
+        assert(atomic_load_explicit(&node->next, memory_order_relaxed) == NULL);
         (void)qthread_incr(&(q->advisory_queuelen), -1);
         FREE_TQNODE(node);
         qthread_thread_free(retval);
@@ -242,18 +251,18 @@ static void sanity_check_tq(NEMESIS_queue *q)
     if (q->shadow_head) {
         assert(q->head != q->shadow_head);
     }
-    if (q->tail != NULL) {
-        if (q->head == NULL) {
+    if (atomic_load_explicit(&q->tail, memory_order_relaxed) != NULL) {
+        if (atomic_load_explicit(&q->head, memory_order_relaxed) == NULL) {
             assert(q->shadow_head != NULL);
         }
     }
-    if ((q->head != NULL) || (q->tail != NULL)) {
+    if ((atomic_load_explicit(&q->head, memory_order_relaxed) != NULL) || (atomic_load_explicit(&q->tail, memory_order_relaxed) != NULL)) {
         if (q->shadow_head) {
             curs = q->shadow_head;
             assert(curs->thread);
             assert(curs->thread != (void *)0x7777777777777777);
-            while (curs->next) {
-                curs = curs->next;
+            while (atomic_load_explicit(&curs->next, memory_order_relaxed)) {
+                curs = atomic_load_explicit(&curs->next, memory_order_relaxed);
                 assert(curs->thread);
                 assert(curs->thread != (void *)0x7777777777777777);
             }
@@ -262,8 +271,8 @@ static void sanity_check_tq(NEMESIS_queue *q)
         curs = q->head;
         assert(curs->thread);
         assert(curs->thread != (void *)0x7777777777777777);
-        while (curs->next) {
-            curs = curs->next;
+        while (atomic_load_explicit(&curs->next, memory_order_relaxed)) {
+            curs = atomic_load_explicit(&curs->next, memory_order_relaxed);
             assert(curs->thread);
             assert(curs->thread != (void *)0x7777777777777777);
         }
@@ -305,14 +314,14 @@ void INTERNAL qt_threadqueue_enqueue(qt_threadqueue_t *restrict q,
     node = ALLOC_TQNODE();
     assert(node != NULL);
     node->thread = t;
-    node->next = NULL;
+    atomic_store_explicit(&node->next, NULL, memory_order_release);
 
     prev = qt_internal_atomic_swap_ptr((void **)&(q->q.tail), node);
 
     if (prev == NULL) {
-        q->q.head = node;
+        atomic_store_explicit(&q->q.head, node, memory_order_relaxed);
     } else {
-        prev->next = node;
+        atomic_store_explicit(&prev->next, node, memory_order_relaxed);
     }
     PARANOIA(sanity_check_tq(&q->q));
     (void)qthread_incr(&(q->advisory_queuelen), 1);
@@ -354,12 +363,12 @@ qthread_t INTERNAL *qt_scheduler_get_thread(qt_threadqueue_t *q,
 #ifdef QTHREAD_USE_EUREKAS
     qt_eureka_disable();
 #endif /* QTHREAD_USE_EUREKAS */
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->q.shadow_head, q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->q.shadow_head, q->advisory_queuelen);
     PARANOIA(sanity_check_tq(&q->q));
     qt_threadqueue_node_t *node = qt_internal_NEMESIS_dequeue(&q->q);
     qthread_t *retval;
 
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->q.shadow_head, q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->q.shadow_head, q->advisory_queuelen);
     PARANOIA(sanity_check_tq(&q->q));
     if (node == NULL) {
 #ifdef QTHREAD_USE_EUREKAS
@@ -368,13 +377,13 @@ qthread_t INTERNAL *qt_scheduler_get_thread(qt_threadqueue_t *q,
 
 #ifdef QTHREAD_CONDWAIT_BLOCKING_QUEUE
         i = num_spins_before_condwait;
-        while (q->q.shadow_head == NULL && q->q.head == NULL && i > 0) {
+        while (q->q.shadow_head == NULL && atomic_load_explicit(&q->q.head, memory_order_relaxed) == NULL && i > 0) {
             SPINLOCK_BODY();
             i--;
         }
 #endif /* QTHREAD_CONDWAIT_BLOCKING_QUEUE */
 
-        while (q->q.shadow_head == NULL && q->q.head == NULL) {
+        while (q->q.shadow_head == NULL && atomic_load_explicit(&q->q.head, memory_order_relaxed) == NULL) {
 #ifndef QTHREAD_CONDWAIT_BLOCKING_QUEUE
             SPINLOCK_BODY();
 #else
@@ -393,11 +402,11 @@ qthread_t INTERNAL *qt_scheduler_get_thread(qt_threadqueue_t *q,
         node = qt_internal_NEMESIS_dequeue(&q->q);
     }
     assert(node);
-    assert(node->next == NULL);
+    assert(atomic_load_explicit(&node->next, memory_order_relaxed) == NULL);
     (void)qthread_incr(&(q->advisory_queuelen), -1);
     retval = node->thread;
     FREE_TQNODE(node);
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->q.shadow_head, q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p sh:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->q.shadow_head, q->advisory_queuelen);
     PARANOIA(sanity_check_tq(&q->q));
     return retval;
 } /*}}} */
@@ -412,33 +421,33 @@ void INTERNAL qt_threadqueue_filter(qt_threadqueue_t *q,
     assert(q != NULL);
     qthread_debug(THREADQUEUE_FUNCTIONS, "begin q:%p f:%p\n", q, f);
 
-    tmp.head = NULL;
-    tmp.tail = NULL;
+    atomic_init(&tmp.head, NULL);
+    atomic_init(&tmp.tail, NULL);
     tmp.shadow_head = NULL;
     tmp.nemesis_advisory_queuelen = 0;
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->advisory_queuelen);
     PARANOIA(sanity_check_tq(&q->q));
     while ((curs = qt_internal_NEMESIS_dequeue_st(&q->q))) {
         qthread_t *t = curs->thread;
-        qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->advisory_queuelen);
+        qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->advisory_queuelen);
         PARANOIA(sanity_check_tq(&tmp));
         PARANOIA(sanity_check_tq(&q->q));
         switch (f(t)) {
             case IGNORE_AND_CONTINUE: // ignore, move on
                 prev = qt_internal_atomic_swap_ptr((void **)&(tmp.tail), curs);
                 if (prev == NULL) {
-                    tmp.head = curs;
+                    atomic_store_explicit(&tmp.head, curs, memory_order_relaxed);
                 } else {
-                    prev->next = curs;
+                    atomic_store_explicit(&prev->next, curs, memory_order_relaxed);
                 }
                 tmp.nemesis_advisory_queuelen++;
                 break;
             case IGNORE_AND_STOP: // ignore, stop looking
                 prev = qt_internal_atomic_swap_ptr((void **)&(tmp.tail), curs);
                 if (prev == NULL) {
-                    tmp.head = curs;
+                    atomic_store_explicit(&tmp.head, curs, memory_order_relaxed);
                 } else {
-                    prev->next = curs;
+                    atomic_store_explicit(&prev->next, curs, memory_order_relaxed);
                 }
                 tmp.nemesis_advisory_queuelen++;
                 goto pushback;
@@ -458,24 +467,24 @@ void INTERNAL qt_threadqueue_filter(qt_threadqueue_t *q,
     }
 pushback:
     /* dequeue the rest of the queue */
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->advisory_queuelen);
-    qthread_debug(THREADQUEUE_DETAILS, "tmp {head:%p tail:%p} tmp->advisory_queuelen:%u\n", tmp.head, tmp.tail, tmp.nemesis_advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "tmp {head:%p tail:%p} tmp->advisory_queuelen:%u\n", atomic_load_explicit(&tmp.head, memory_order_relaxed), atomic_load_explicit(&tmp.tail, memory_order_relaxed), tmp.nemesis_advisory_queuelen);
     PARANOIA(sanity_check_tq(&tmp));
-    if (q->q.head) {
+    if (atomic_load_explicit(&q->q.head, memory_order_relaxed)) {
         prev = qt_internal_atomic_swap_ptr((void **)&(tmp.tail), q->q.head);
         if (prev == NULL) {
-            tmp.head = q->q.head;
+            atomic_store_explicit(&tmp.head, atomic_load_explicit(&q->q.head, memory_order_relaxed), memory_order_relaxed);
         } else {
-            prev->next = q->q.head;
+            atomic_store_explicit(&prev->next, atomic_load_explicit(&q->q.head, memory_order_relaxed), memory_order_relaxed);
         }
         tmp.nemesis_advisory_queuelen += q->advisory_queuelen;
-        tmp.tail = q->q.tail;
+        atomic_store_explicit(&tmp.tail, atomic_load_explicit(&q->q.tail, memory_order_relaxed), memory_order_relaxed);
     }
-    q->q.head = tmp.head;
-    q->q.tail = tmp.tail;
+    atomic_store_explicit(&q->q.head, atomic_load_explicit(&tmp.head, memory_order_relaxed), memory_order_relaxed);
+    atomic_store_explicit(&q->q.tail, atomic_load_explicit(&tmp.tail, memory_order_relaxed), memory_order_relaxed);
     q->q.shadow_head = NULL;
     q->advisory_queuelen = tmp.nemesis_advisory_queuelen;
-    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, q->q.head, q->q.tail, q->advisory_queuelen);
+    qthread_debug(THREADQUEUE_DETAILS, "q(%p)->q {head:%p tail:%p} q->advisory_queuelen:%u\n", q, atomic_load_explicit(&q->q.head, memory_order_relaxed), atomic_load_explicit(&q->q.tail, memory_order_relaxed), q->advisory_queuelen);
     PARANOIA(sanity_check_tq(&q->q));
 } /*}}}*/
 
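Editor's note: the conversion above follows the classic NEMESIS protocol. Producers publish a node with an atomic swap on tail and then link it in through the previous tail's next pointer; the single consumer drains from a private shadow_head, falling back to a CAS on tail to detach the last node. The sketch below is a minimal, self-contained illustration of that protocol in plain C11 <stdatomic.h>, not the qthreads code itself: the nq_* names are hypothetical, the single-consumer assumption is hardcoded, and acquire/release orderings are used explicitly where the patch relies on qthread_cas_ptr and qt_internal_atomic_swap_ptr for ordering.

#include <stdatomic.h>
#include <stddef.h>

/* Hypothetical node type: 'next' is the only field producers and the
 * consumer race on, so it is the only field that must be _Atomic. */
typedef struct nq_node {
    struct nq_node *_Atomic next;
    void *payload;
} nq_node;

/* Hypothetical queue: head/tail are shared; shadow_head is consumer-private. */
typedef struct {
    nq_node *_Atomic head;
    nq_node *_Atomic tail;
    nq_node *shadow_head;
} nq_queue;

static void nq_init(nq_queue *q)
{
    atomic_init(&q->head, NULL);
    atomic_init(&q->tail, NULL);
    q->shadow_head = NULL;
}

/* Any thread may enqueue: swap the node into tail, then link it in. */
static void nq_enqueue(nq_queue *q, nq_node *node)
{
    atomic_store_explicit(&node->next, NULL, memory_order_relaxed);
    nq_node *prev = atomic_exchange_explicit(&q->tail, node, memory_order_acq_rel);
    if (prev == NULL) {
        /* Queue was empty: publish the node as the new head. */
        atomic_store_explicit(&q->head, node, memory_order_release);
    } else {
        /* Queue was non-empty: link behind the previous tail. */
        atomic_store_explicit(&prev->next, node, memory_order_release);
    }
}

/* Only ONE thread may dequeue (the NEMESIS single-consumer assumption). */
static nq_node *nq_dequeue(nq_queue *q)
{
    if (!q->shadow_head) {
        /* Claim the whole published chain in one step. */
        nq_node *head = atomic_load_explicit(&q->head, memory_order_acquire);
        if (!head) {
            return NULL;
        }
        q->shadow_head = head;
        atomic_store_explicit(&q->head, NULL, memory_order_relaxed);
    }

    nq_node *retval = q->shadow_head;
    nq_node *next = atomic_load_explicit(&retval->next, memory_order_acquire);

    if (next != NULL) {
        q->shadow_head = next;
    } else {
        /* retval looks like the last node: try to detach it from tail. */
        nq_node *expected = retval;
        q->shadow_head = NULL;
        if (!atomic_compare_exchange_strong_explicit(&q->tail, &expected, NULL,
                                                     memory_order_acq_rel,
                                                     memory_order_acquire)) {
            /* A concurrent enqueue won the race on tail; wait for it to
             * finish linking retval->next, then keep the new chain. */
            while ((next = atomic_load_explicit(&retval->next,
                                                memory_order_acquire)) == NULL) {
                /* spin */
            }
            q->shadow_head = next;
        }
    }
    return retval;
}

This also shows why the patch leaves shadow_head as a plain pointer while promoting head, tail, and next to _Atomic: only the consumer thread ever touches shadow_head, so producers and the consumer synchronize exclusively through tail and next, and the hot-path stores can stay memory_order_relaxed when an adjacent full-strength swap or CAS already provides the needed ordering.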