+void wait_for_quiescent_state(void)
+{
+ LIST_HEAD(qsreaders);
+ int wait_loops = 0;
+ struct urcu_reader *index, *tmp;
+
+ if (list_empty(&registry))
+ return;
+ /*
+ * Wait for each thread urcu_reader.ctr count to become 0.
+ */
+ for (;;) {
+ wait_loops++;
+ if (wait_loops == RCU_QS_ACTIVE_ATTEMPTS) {
+ uatomic_dec(&gp_futex);
+ /* Write futex before read reader_gp */
+ force_mb_all_threads();
+ }
+
+ list_for_each_entry_safe(index, tmp, &registry, head) {
+ if (!rcu_old_gp_ongoing(&index->ctr))
+ list_move(&index->head, &qsreaders);
+ }
+
+#ifndef HAS_INCOHERENT_CACHES
+ if (list_empty(&registry)) {
+ if (wait_loops == RCU_QS_ACTIVE_ATTEMPTS) {
+ /* Read reader_gp before write futex */
+ force_mb_all_threads();
+ uatomic_set(&gp_futex, 0);
+ }
+ break;
+ } else {
+ if (wait_loops == RCU_QS_ACTIVE_ATTEMPTS)
+ wait_gp();
+ else
+ cpu_relax();
+ }
+#else /* #ifndef HAS_INCOHERENT_CACHES */
+ /*
+ * BUSY-LOOP. Force the reader thread to commit its
+ * urcu_reader.ctr update to memory if we wait for too long.
+ */
+ if (list_empty(&registry)) {
+ if (wait_loops == RCU_QS_ACTIVE_ATTEMPTS) {
+ /* Read reader_gp before write futex */
+ force_mb_all_threads();
+ uatomic_set(&gp_futex, 0);
+ }
+ break;
+ } else {
+ switch (wait_loops) {
+ case RCU_QS_ACTIVE_ATTEMPTS:
+ wait_gp();
+ break; /* only escape switch */
+ case KICK_READER_LOOPS:
+ force_mb_all_threads();
+ wait_loops = 0;
+ break; /* only escape switch */
+ default:
+ cpu_relax();
+ }
+ }
+#endif /* #else #ifndef HAS_INCOHERENT_CACHES */