#ifndef _SALTICIDAE_QUEUE_H
#define _SALTICIDAE_QUEUE_H
#include <atomic>
#include <vector>
#include <cassert>
#include <thread>
namespace salticidae {
/* Assumed CPU cache-line size in bytes; used below (e.g. `alignas` on hot
 * atomics such as FreeList::top) to keep contended variables on their own
 * cache line. `constexpr` (rather than `const`) makes the compile-time
 * usability explicit — the file already relies on C++11 (<atomic>). */
static constexpr size_t cacheline_size = 64;
class FreeList {
public:
/* Intrusive link for the free list. A node starts life with one
 * reference held by its creator; `next` is only meaningful while the
 * node sits on the stack. */
struct Node {
    std::atomic<Node *> next{nullptr};   /* successor on the free stack */
    std::atomic<size_t> refcnt{1};       /* creator holds the initial ref */
    Node() = default;
};
private:
/* Top of the Treiber stack of free nodes. Cache-line aligned so that the
 * heavily CAS'ed `top` does not false-share with adjacent data. */
alignas(cacheline_size) std::atomic<Node *> top;
public:
/* Construct an empty free list. */
FreeList(): top(nullptr) {}
/* Non-copyable and non-movable: the list holds an atomic member and is
 * meant to be operated on concurrently in place. */
FreeList(const FreeList &) = delete;
FreeList(FreeList &&) = delete;
/* Drop one reference to `u`. The thread that releases the *last*
 * reference pushes the node back onto the free stack; all other callers
 * return immediately after the decrement. */
void release_ref(Node *u) {
    /* fetch_sub returns the previous count: only the transition 1 -> 0
     * continues into the push loop below. */
    if (u->refcnt.fetch_sub(1, std::memory_order_relaxed) != 1) return;
    for (;;)
    {
        auto t = top.load(std::memory_order_relaxed);
        // repair the next pointer before CAS, otherwise u->next == nullptr
        // could lead to skipping elements
        u->next.store(t, std::memory_order_relaxed);
        // the replacement is ok even if ABA happens
        /* release ordering on the successful CAS publishes the u->next
         * store above to any thread that acquires `top` in pop(). */
        if (top.compare_exchange_weak(t, u, std::memory_order_release))
        {
            /* refcnt is still 0 at this point; pop() treats a zero-refcnt
             * node as a ghost and retries, so the node only becomes
             * claimable once this store lands. */
            u->refcnt.store(1, std::memory_order_relaxed);
            break;
        }
    }
}
/* Return node `u` to the free list. Pushing is just releasing the
 * caller's reference: the node is linked onto the stack once its count
 * reaches zero inside release_ref(). A lock-free stack push cannot
 * fail, so this always returns true (keeping the bool signature
 * symmetric with pop()). */
bool push(Node *u) {
    release_ref(u);
    return true;
}
bool pop(Node *&r) {
bool loop = true;
while (loop)
{
auto u = top.load(std::memory_order_acquire);
/* the list is now empty */
if (u == nullptr) return false;
auto t = u->refcnt.load(std::memory_order_relaxed);
/* let's wait for another round if u is a ghost (already popped) */
if (!t) continue;
/* otherwise t > 0, so with CAS, the invariant that zero refcnt can
* never be increased is guaranteed */
if (u->refcnt.compare_exchange_weak(t, t + 1, std::memory_order_relaxed))
{
/* here, nobody is able to change v->next (because v->next is
* only changed when pushed) even when ABA happens */
auto v = u;
auto nv = u->next.load(std::memory_order_relaxed);
if (top.compare_exchange_weak(v, nv,