4 #include "model-assert.h"
8 #define relaxed memory_order_relaxed
9 #define release memory_order_release
10 #define acquire memory_order_acquire
12 #define MAX_FREELIST 4 /* Each thread can own up to MAX_FREELIST free nodes */
13 #define INITIAL_FREE 2 /* Each thread starts with INITIAL_FREE free nodes */
15 #define POISON_IDX 0x666
17 static unsigned int (*free_lists)[MAX_FREELIST];
19 /* Search this thread's free list for a "new" node */
/* Pop an unused node index from the calling thread's private free list.
 * NOTE(review): several lines are elided in this view; the visible logic
 * scans free_lists[t] for a claimable entry, and the commented-out
 * store_32 suggests the claimed slot is then zeroed -- confirm against
 * the full source. */
20 static unsigned int new_node()
/* Free lists are strictly per-thread, so no synchronization is needed. */
23 int t = get_thread_num();
24 for (i = 0; i < MAX_FREELIST; i++) {
25 //unsigned int node = load_32(&free_lists[t][i]);
/* Plain (non-atomic) read: only this thread ever touches its own list. */
26 unsigned int node = free_lists[t][i];
28 //store_32(&free_lists[t][i], 0);
/* Reaching here means no slot yielded a node -- the list is exhausted. */
33 /* free_list is empty? */
38 /* Simulate the fact that when a node is recycled, it may be reassigned to
39 * the same queue or reused for some other purpose */
/* Model a recycled node being written by its new owner: clobber its 'next'
 * field so any stale reader of the old value can be detected by the model
 * checker. The release store publishes the overwrite.
 * NOTE(review): -1 here acts as an "invalid pointer" sentinel distinct from
 * POISON_IDX -- confirm intent against the full source. */
40 void simulateRecycledNodeUpdate(queue_t *q, unsigned int node) {
41 atomic_store_explicit(&q->nodes[node].next, -1, memory_order_release);
45 /* Place this node index back on this thread's free list */
/* Return a node index to the calling thread's private free list so a later
 * new_node() call can reuse it. Counterpart of new_node().
 * NOTE(review): the NULL-node guard and the loop-exit handling are elided in
 * this view -- see the full source. */
46 static void reclaim(unsigned int node)
/* Per-thread list: index by this thread's id, no locking required. */
49 int t = get_thread_num();
51 /* Don't reclaim NULL node */
54 for (i = 0; i < MAX_FREELIST; i++) {
55 /* Should never race with our own thread here */
56 //unsigned int idx = load_32(&free_lists[t][i]);
/* Plain read is safe: free_lists[t] is owned exclusively by thread t. */
57 unsigned int idx = free_lists[t][i];
59 /* Found empty spot in free list */
61 //store_32(&free_lists[t][i], node);
/* Park the node in the first empty slot and stop scanning. */
62 free_lists[t][i] = node;
/* One-time queue setup: carve out per-thread free lists, poison the spare
 * nodes' 'next' fields so use of an uninitialized node is detectable, and
 * install node 1 as the initial dummy node that both head and tail point at.
 * Node indices: 0 is reserved as NULL, 1 is the dummy, 2.. are free nodes. */
70 void init_queue(queue_t *q, int num_threads)
74 /* Initialize each thread's free list with INITIAL_FREE pointers */
75 /* The actual nodes are initialized with poison indexes */
/* NOTE(review): malloc result is not checked; acceptable for a model-checker
 * harness, but a NULL return would fault below. */
76 free_lists = ( unsigned int (*)[MAX_FREELIST] ) malloc(num_threads * sizeof(*free_lists));
77 for (i = 0; i < num_threads; i++) {
78 for (j = 0; j < INITIAL_FREE; j++) {
/* Thread i's j-th free node gets a globally unique index starting at 2. */
79 free_lists[i][j] = 2 + i * MAX_FREELIST + j;
/* Poisoned 'next' flags any read of a node that was never enqueued. */
80 atomic_init(&q->nodes[free_lists[i][j]].next, MAKE_POINTER(POISON_IDX, 0));
84 /* initialize queue */
/* Head and tail both reference dummy node 1 with ABA counter 0. */
85 atomic_init(&q->head, MAKE_POINTER(1, 0));
86 atomic_init(&q->tail, MAKE_POINTER(1, 0));
/* Dummy node's next is NULL (index 0): the queue starts empty. */
87 atomic_init(&q->nodes[1].next, MAKE_POINTER(0, 0));
90 /** @DeclareState: IntList *q;
91 @Initial: q = new IntList;
93 model_print("\tSTATE(q): ");
97 /** @Transition: STATE(q)->push_back(val);
98 @Print: model_print("\tENQ #%d: val=%d\n", ID, val); */
/* Michael-Scott enqueue: prepare a private node, then CAS it onto the tail
 * node's 'next'; afterwards (or when helping a stalled peer) swing q->tail
 * forward. Each "XXX-injection"/"XXX-known-bug" comment documents a memory
 * order that the AutoMO tooling deliberately weakens to produce a detectable
 * violation -- do not "simplify" those orders.
 * NOTE(review): the retry loop header and node acquisition are elided in
 * this view; 'node' presumably comes from new_node() -- confirm. */
99 void enqueue(queue_t *q, unsigned int val, int n)
108 //store_32(&q->nodes[node].value, val);
/* Plain store is fine: 'node' is still private to this thread here. */
109 q->nodes[node].value = val;
/* Preserve the node's ABA count while resetting its pointer to NULL. */
110 tmp = atomic_load_explicit(&q->nodes[node].next, relaxed);
111 set_ptr(&tmp, 0); // NULL
112 // XXX-known-bug-#1: This is a found bug in AutoMO, and testcase4 can reveal
114 // To reproduce, weaken the parameter "memory_order_release" to
115 // "memory_order_relaxed", run "make" to recompile, and then run:
116 // "./run.sh ./ms-queue/testcase4 -m2 -y -u3 -tSPEC"
117 /********** Detected KNOWN BUG (testcase4) **********/
118 atomic_store_explicit(&q->nodes[node].next, tmp, release);
121 // XXX-injection-#1: To reproduce, weaken the parameter
122 // "memory_order_acquire" to "memory_order_relaxed", run "make" to
123 // recompile, and then run:
124 // "./run.sh ./ms-queue/testcase2 -m2 -y -u3 -tSPEC"
125 /********** Detected UL (testcase2) **********/
/* Snapshot tail, then the tail node's next; both loads need acquire so the
 * node contents published by other enqueuers are visible. */
126 tail = atomic_load_explicit(&q->tail, acquire);
127 // XXX-injection-#2: To reproduce, weaken the parameter
128 // "memory_order_acquire" to "memory_order_relaxed", run "make" to
129 // recompile, and then run:
130 // "./run.sh ./ms-queue/testcase4 -m2 -y -u3 -tSPEC"
131 /********** Detected Correctness (testcase4) **********/
132 next = atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire);
/* Consistency check: only proceed if tail has not moved since the snapshot. */
133 if (tail == atomic_load_explicit(&q->tail, relaxed)) {
135 /* Check for uninitialized 'next' */
136 //MODEL_ASSERT(get_ptr(next) != POISON_IDX);
/* Tail was genuinely last (next == NULL): try to link our node after it. */
138 if (get_ptr(next) == 0) { // == NULL
/* Bump the ABA counter along with the new pointer value. */
139 pointer value = MAKE_POINTER(node, get_count(next) + 1);
140 // XXX-injection-#3: To reproduce, weaken the parameter
141 // "memory_order_release" to "memory_order_relaxed", run "make" to
142 // recompile, and then run:
143 // "./run.sh ./ms-queue/testcase1 -m2 -y -u3 -tSPEC"
144 /********** Detected Correctness (testcase1) **********/
/* Linearization point on success: the node becomes reachable here. */
145 success = atomic_compare_exchange_strong_explicit(&q->nodes[get_ptr(tail)].next,
146 &next, value, release, release);
147 /** @OPClearDefine: success */
/* else-branch: tail lagged behind -- help by swinging q->tail to its next. */
150 // XXX-injection-#4: To reproduce, weaken the parameter
151 // "memory_order_acquire" to "memory_order_relaxed", run "make" to
152 // recompile, and then run:
153 // "./run.sh ./ms-queue/testcase2 -m2 -y -u3 -tSPEC"
154 /********** Detected UL (testcase2) **********/
155 unsigned int ptr = get_ptr(atomic_load_explicit(&q->nodes[get_ptr(tail)].next, acquire));
156 pointer value = MAKE_POINTER(ptr,
157 get_count(tail) + 1);
158 // XXX-injection-#5: To reproduce, weaken the parameter
159 // "memory_order_release" to "memory_order_relaxed", run "make" to
160 // recompile, and then run:
161 // "./run.sh ./ms-queue/testcase2 -m2 -y -u3 -tSPEC"
162 /********** Detected Correctness (testcase2) **********/
/* Helping CAS: failure is benign (someone else already advanced tail). */
163 atomic_compare_exchange_strong_explicit(&q->tail,
171 // XXX-injection-#6: To reproduce, weaken the parameter
172 // "memory_order_release" to "memory_order_relaxed", run "make" to
173 // recompile, and then run:
174 // "./run.sh ./ms-queue/testcase1 -m2 -y -u3 -tSPEC"
175 /********** Detected Correctness (testcase1) **********/
/* After a successful link, swing tail to the freshly appended node. */
176 atomic_compare_exchange_strong_explicit(&q->tail,
178 MAKE_POINTER(node, get_count(tail) + 1),
182 /** @Transition: S_RET = STATE(q)->empty() ? 0 : STATE(q)->front();
183 if (S_RET && C_RET) STATE(q)->pop_front();
184 @JustifyingPostcondition: if (!C_RET)
185 return S_RET == C_RET;
186 @PostCondition: return C_RET ? *retVal == S_RET : true;
187 @Print: model_print("\tDEQ #%d: C_RET=%d && *retVal=%d && S_RET=%d\n", ID,
188 C_RET, *retVal, S_RET);
/* Michael-Scott dequeue: snapshot head/tail/head->next, detect the empty
 * case, help a lagging tail, otherwise CAS head forward and reclaim the old
 * dummy node. Returns false on empty, true with *retVal set on success.
 * The injection comments mark memory orders the AutoMO tooling weakens on
 * purpose -- keep them as written.
 * NOTE(review): the retry loop, the success-path epilogue, and the closing
 * of this function run past the end of this chunk -- see the full source. */
190 int dequeue(queue_t *q, unsigned int *retVal, unsigned int *reclaimNode)
198 // XXX-injection-#7: To reproduce, weaken the parameter
199 // "memory_order_acquire" to "memory_order_relaxed", run "make" to
200 // recompile, and then run:
201 // "./run.sh ./ms-queue/testcase3 -m2 -y -u3 -tSPEC"
202 /********** Detected Correctness (testcase3) **********/
/* Snapshot head first; acquire pairs with the enqueuer's release stores. */
203 head = atomic_load_explicit(&q->head, acquire);
204 // To reproduce, weaken the parameter "memory_order_acquire" to
205 // "memory_order_relaxed", run "make" to recompile, and then run:
206 // "./run.sh ./ms-queue/testcase4 -m2 -y -u3 -tSPEC"
207 // XXX-known-bug-#2: This is another known bug, and testcase2 can reveal
209 /********** Detected KNOWN BUG (testcase2) **********/
210 tail = atomic_load_explicit(&q->tail, acquire);
212 // XXX-injection-#8: To reproduce, weaken the parameter
213 // "memory_order_acquire" to "memory_order_relaxed", run "make" to
214 // recompile, and then run:
215 // "./run.sh ./ms-queue/testcase1 -m2 -y -u3 -tSPEC"
216 /********** Detected Correctness (testcase1) **********/
/* head points at the dummy node; its 'next' is the first real element. */
217 next = atomic_load_explicit(&q->nodes[get_ptr(head)].next, acquire);
218 /** @OPClearDefine: true */
/* Consistency check: only act on a head snapshot that is still current. */
219 if (atomic_load_explicit(&q->head, relaxed) == head) {
220 if (get_ptr(head) == get_ptr(tail)) {
222 /* Check for uninitialized 'next' */
223 MODEL_ASSERT(get_ptr(next) != POISON_IDX);
/* head == tail and no successor: the queue is empty. */
225 if (get_ptr(next) == 0) { // NULL
226 return false; // NULL
229 // XXX-injection-#9: To reproduce, weaken the parameter
230 // "memory_order_release" to "memory_order_relaxed", run "make" to
231 // recompile, and then run:
232 // "./run.sh ./ms-queue/testcase2 -m2 -y -u3 -tSPEC"
233 /********** Detected UL (testcase2) **********/
/* head == tail but a node is linked: help swing the lagging tail forward. */
234 atomic_compare_exchange_strong_explicit(&q->tail,
236 MAKE_POINTER(get_ptr(next), get_count(tail) + 1),
240 //*retVal = load_32(&q->nodes[get_ptr(next)].value);
/* Read the value BEFORE the head CAS: after the CAS another thread may
 * reclaim and overwrite this node. */
241 *retVal = q->nodes[get_ptr(next)].value;
243 // XXX-injection-#10: To reproduce, weaken the parameter
244 // "memory_order_release" to "memory_order_relaxed", run "make" to
245 // recompile, and then run:
246 // "./run.sh ./ms-queue/testcase3 -m2 -y -u3 -tSPEC"
247 /********** Detected Correctness (testcase3) **********/
/* Linearization point on success: head advances to 'next'. */
248 success = atomic_compare_exchange_strong_explicit(&q->head,
250 MAKE_POINTER(get_ptr(next), get_count(head) + 1),
/* The old dummy node is now unreachable; hand it back to this thread's
 * free list and report it to the caller. */
257 *reclaimNode = get_ptr(head);
258 reclaim(get_ptr(head));