/* ms-queue/queue.c (model-checker-benchmarks.git): Michael & Scott lock-free queue benchmark */
#include <threads.h>
#include <stdbool.h> /* for bool in dequeue() */
#include <stdlib.h>
#include "librace.h"
#include "model-assert.h"

#include "queue.h"

#define relaxed memory_order_relaxed
#define release memory_order_release
#define acquire memory_order_acquire

#define MAX_FREELIST 4 /* Each thread can own up to MAX_FREELIST free nodes */
#define INITIAL_FREE 2 /* Each thread starts with INITIAL_FREE free nodes */

#define POISON_IDX 0x666

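/*
 * A 'pointer' packs a node index and a modification counter into one word
 * so that a single CAS updates both; the counter is what defends against
 * the ABA problem. MAKE_POINTER, get_ptr, get_count, and set_ptr are
 * defined in queue.h. A minimal sketch of that packing, assuming a 64-bit
 * pointer type with the count in the upper 32 bits (the real header may
 * lay it out differently):
 *
 *   typedef unsigned long long pointer;
 *   #define PTR_MASK 0xffffffffULL
 *   #define MAKE_POINTER(p, c) ((((pointer)(c)) << 32) | (p))
 *   #define get_ptr(x)         ((unsigned int)((x) & PTR_MASK))
 *   #define get_count(x)       ((unsigned int)((x) >> 32))
 *   #define set_ptr(p, val)    (*(p) = ((*(p) & ~PTR_MASK) | (val)))
 */
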
static unsigned int (*free_lists)[MAX_FREELIST];

/* Search this thread's free list for a "new" node */
static unsigned int new_node(void)
{
        int i;
        int t = get_thread_num();
        for (i = 0; i < MAX_FREELIST; i++) {
                unsigned int node = load_32(&free_lists[t][i]);
                if (node) {
                        store_32(&free_lists[t][i], 0);
                        return node;
                }
        }
        /* The free list should never be empty */
        MODEL_ASSERT(0);
        return 0;
}

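/*
 * Nodes come from a fixed per-thread pool instead of the allocator. With
 * the defaults above (MAX_FREELIST = 4, INITIAL_FREE = 2), init_queue()
 * below seeds thread 0 with node indexes {2, 3} and thread 1 with {6, 7};
 * dequeue() refills a pool via reclaim().
 */
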
/* Simulate the fact that when a node is recycled, it may be reassigned to
 * the same queue or reused for something else entirely */
void simulateRecycledNodeUpdate(queue_t *q, unsigned int node) {
        atomic_store_explicit(&q->nodes[node].next, -1, memory_order_release);
}

/* Place this node index back on this thread's free list */
static void reclaim(unsigned int node)
{
        int i;
        int t = get_thread_num();

        /* Don't reclaim the NULL node (index 0) */
        MODEL_ASSERT(node);

        for (i = 0; i < MAX_FREELIST; i++) {
                /* Should never race with our own thread here */
                unsigned int idx = load_32(&free_lists[t][i]);

                /* Found an empty spot in the free list */
                if (idx == 0) {
                        store_32(&free_lists[t][i], node);
                        return;
                }
        }
        /* The free list should never be full */
        MODEL_ASSERT(0);
}

void init_queue(queue_t *q, int num_threads)
{
        int i, j;

        /* Initialize each thread's free list with INITIAL_FREE node indexes */
        /* The actual nodes are initialized with poison indexes */
        /* calloc, not malloc: unused free-list slots must read as 0 (empty) */
        free_lists = calloc(num_threads, sizeof(*free_lists));
        for (i = 0; i < num_threads; i++) {
                for (j = 0; j < INITIAL_FREE; j++) {
                        free_lists[i][j] = 2 + i * MAX_FREELIST + j;
                        atomic_init(&q->nodes[free_lists[i][j]].next, MAKE_POINTER(POISON_IDX, 0));
                }
        }

        /* Initialize the queue with a single dummy node (index 1) */
        atomic_init(&q->head, MAKE_POINTER(1, 0));
        atomic_init(&q->tail, MAKE_POINTER(1, 0));
        atomic_init(&q->nodes[1].next, MAKE_POINTER(0, 0));
}

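/*
 * After init_queue() the queue holds only the dummy node: index 0 is
 * reserved to mean NULL, index 1 is the dummy, and indexes 2 and up belong
 * to the per-thread free lists.
 *
 *   head --> [node 1 (dummy)] --next--> 0 (NULL) <-- tail
 */
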
/*
 * enqueue() follows the two-step Michael & Scott protocol: first CAS the
 * last node's 'next' from NULL to the new node, then CAS 'tail' forward.
 * A thread that finds 'next' already non-NULL helps swing 'tail' on the
 * lagging enqueuer's behalf before retrying.
 */
void enqueue(queue_t *q, unsigned int val, int n)
{
        int success = 0;
        unsigned int node;
        pointer tail;
        pointer next;
        pointer tmp;

        node = new_node();
        store_32(&q->nodes[node].value, val);
        tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
        set_ptr(&tmp, 0); // NULL
        atomic_store_explicit(&q->nodes[node].next, tmp, relaxed);

        while (!success) {
                tail = atomic_load_explicit(&q->tail, acquire);
                next = atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire);
                /* Are 'tail' and 'next' still consistent? */
                if (tail == atomic_load_explicit(&q->tail, relaxed)) {

                        /* Check for uninitialized 'next' */
                        MODEL_ASSERT(get_ptr(next) != POISON_IDX);

                        if (get_ptr(next) == 0) { // == NULL
                                /* Try to link the new node after the last node */
                                pointer value = MAKE_POINTER(node, get_count(next) + 1);
                                success = atomic_compare_exchange_strong_explicit(&q->nodes[get_ptr(tail)].next,
                                                &next, value, release, relaxed);
                        }
                        if (!success) {
                                /* 'tail' is lagging; help swing it forward */
                                unsigned int ptr = get_ptr(atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire));
                                pointer value = MAKE_POINTER(ptr,
                                                get_count(tail) + 1);
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail, value,
                                                release, relaxed);
                                thrd_yield();
                        }
                }
        }
        /* Swing 'tail' to the newly linked node */
        atomic_compare_exchange_strong_explicit(&q->tail,
                        &tail,
                        MAKE_POINTER(node, get_count(tail) + 1),
                        release, relaxed);
}

/*
 * dequeue() returns false if the queue is empty. On success it stores the
 * dequeued value in *retVal, reports the index of the node it recycled in
 * *reclaimNode, and places that node back on this thread's free list.
 */
bool dequeue(queue_t *q, unsigned int *retVal, unsigned int *reclaimNode)
{
        int success = 0;
        pointer head;
        pointer tail;
        pointer next;

        while (!success) {
                head = atomic_load_explicit(&q->head, acquire);
                tail = atomic_load_explicit(&q->tail, relaxed);
                next = atomic_load_explicit(&q->nodes[get_ptr(head)].next, acquire);
                /* Is 'head' still consistent with 'next'? */
                if (atomic_load_explicit(&q->head, relaxed) == head) {
                        if (get_ptr(head) == get_ptr(tail)) {

                                /* Check for uninitialized 'next' */
                                MODEL_ASSERT(get_ptr(next) != POISON_IDX);

                                if (get_ptr(next) == 0) { // queue is empty
                                        return false;
                                }
                                /* 'tail' is lagging; help swing it forward */
                                atomic_compare_exchange_strong_explicit(&q->tail,
                                                &tail,
                                                MAKE_POINTER(get_ptr(next), get_count(tail) + 1),
                                                release, relaxed);
                                thrd_yield();
                        } else {
                                *retVal = load_32(&q->nodes[get_ptr(next)].value);
                                success = atomic_compare_exchange_strong_explicit(&q->head,
                                                &head,
                                                MAKE_POINTER(get_ptr(next), get_count(head) + 1),
                                                release, relaxed);
                                if (!success)
                                        thrd_yield();
                        }
                }
        }
        /* The old dummy node is detached; report it and recycle it */
        *reclaimNode = get_ptr(head);
        reclaim(get_ptr(head));
        return true;
}
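
/*
 * A minimal sketch of a harness that could drive this file, assuming the
 * model checker's entry point is user_main() and that its <threads.h>
 * thrd_create() takes a void (*)(void *) start routine, as in CDSChecker.
 * Everything here (worker, user_main, the argument encoding) is
 * illustrative, not part of this benchmark:
 *
 *   static queue_t *queue;
 *
 *   static void worker(void *arg)
 *   {
 *           unsigned int val, node;
 *           enqueue(queue, 1 + (unsigned int)(uintptr_t)arg, 0);
 *           if (dequeue(queue, &val, &node))
 *                   simulateRecycledNodeUpdate(queue, node);
 *   }
 *
 *   int user_main(int argc, char **argv)
 *   {
 *           thrd_t t1, t2;
 *           queue = calloc(1, sizeof(*queue));
 *           init_queue(queue, 2);
 *           thrd_create(&t1, worker, (void *)0);
 *           thrd_create(&t2, worker, (void *)1);
 *           thrd_join(t1);
 *           thrd_join(t2);
 *           free(queue);
 *           return 0;
 *   }
 */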