MEDIUM: memory: Use the new _HA_ATOMIC_* macros.

Switch to the new relaxed _HA_ATOMIC_* macros, and add explicit
__ha_barrier_atomic_store() calls after the CAS loops where store
ordering must still be guaranteed.
diff --git a/include/common/memory.h b/include/common/memory.h
index 9c54422..43ab8e9 100644
--- a/include/common/memory.h
+++ b/include/common/memory.h
@@ -229,8 +229,9 @@
 		__ha_barrier_load();
 		new.free_list = *POOL_LINK(pool, cmp.free_list);
 	} while (__ha_cas_dw((void *)&pool->free_list, (void *)&cmp, (void *)&new) == 0);
+	__ha_barrier_atomic_store();
 
-	HA_ATOMIC_ADD(&pool->used, 1);
+	_HA_ATOMIC_ADD(&pool->used, 1);
 #ifdef DEBUG_MEMORY_POOLS
 	/* keep track of where the element was allocated from */
 	*POOL_LINK(pool, cmp.free_list) = (void *)pool;
@@ -288,8 +289,9 @@
 	do {
 		*POOL_LINK(pool, ptr) = (void *)free_list;
 		__ha_barrier_store();
-	} while (!HA_ATOMIC_CAS(&pool->free_list, &free_list, ptr));
-	HA_ATOMIC_SUB(&pool->used, 1);
+	} while (!_HA_ATOMIC_CAS(&pool->free_list, &free_list, ptr));
+	__ha_barrier_atomic_store();
+	_HA_ATOMIC_SUB(&pool->used, 1);
 }
 
 /* frees an object to the local cache, possibly pushing oldest objects to the
diff --git a/src/memory.c b/src/memory.c
index b200c38..ef7ec93 100644
--- a/src/memory.c
+++ b/src/memory.c
@@ -159,13 +159,13 @@
 
 	while (1) {
 		if (limit && allocated >= limit) {
-			HA_ATOMIC_ADD(&pool->allocated, allocated - allocated_orig);
+			_HA_ATOMIC_ADD(&pool->allocated, allocated - allocated_orig);
 			return NULL;
 		}
 
 		ptr = malloc(size + POOL_EXTRA);
 		if (!ptr) {
-			HA_ATOMIC_ADD(&pool->failed, 1);
+			_HA_ATOMIC_ADD(&pool->failed, 1);
 			if (failed)
 				return NULL;
 			failed++;
@@ -179,11 +179,12 @@
 		do {
 			*POOL_LINK(pool, ptr) = free_list;
 			__ha_barrier_store();
-		} while (HA_ATOMIC_CAS(&pool->free_list, &free_list, ptr) == 0);
+		} while (_HA_ATOMIC_CAS(&pool->free_list, &free_list, ptr) == 0);
 	}
+	__ha_barrier_atomic_store();
 
-	HA_ATOMIC_ADD(&pool->allocated, allocated - allocated_orig);
-	HA_ATOMIC_ADD(&pool->used, 1);
+	_HA_ATOMIC_ADD(&pool->allocated, allocated - allocated_orig);
+	_HA_ATOMIC_ADD(&pool->used, 1);
 
 #ifdef DEBUG_MEMORY_POOLS
 	/* keep track of where the element was allocated from */
@@ -210,7 +211,8 @@
 		return;
 	do {
 		next = pool->free_list;
-	} while (!HA_ATOMIC_CAS(&pool->free_list, &next, NULL));
+	} while (!_HA_ATOMIC_CAS(&pool->free_list, &next, NULL));
+	__ha_barrier_atomic_store();
 	while (next) {
 		temp = next;
 		next = *POOL_LINK(pool, temp);
@@ -218,7 +220,7 @@
 		free(temp);
 	}
 	pool->free_list = next;
-	HA_ATOMIC_SUB(&pool->allocated, removed);
+	_HA_ATOMIC_SUB(&pool->allocated, removed);
 	/* here, we should have pool->allocate == pool->used */
 }
 
@@ -235,7 +237,7 @@
 	int cur_recurse = 0;
 	struct pool_head *entry;
 
-	if (recurse || !HA_ATOMIC_CAS(&recurse, &cur_recurse, 1))
+	if (recurse || !_HA_ATOMIC_CAS(&recurse, &cur_recurse, 1))
 		return;
 
 	list_for_each_entry(entry, &pools, list) {
@@ -253,11 +255,11 @@
 			if (__ha_cas_dw(&entry->free_list, &cmp, &new) == 0)
 				continue;
 			free(cmp.free_list);
-			HA_ATOMIC_SUB(&entry->allocated, 1);
+			_HA_ATOMIC_SUB(&entry->allocated, 1);
 		}
 	}
 
-	HA_ATOMIC_STORE(&recurse, 0);
+	_HA_ATOMIC_STORE(&recurse, 0);
 }
 
 /* frees an object to the local cache, possibly pushing oldest objects to the
@@ -386,7 +388,7 @@
 	int cur_recurse = 0;
 	struct pool_head *entry;
 
-	if (recurse || !HA_ATOMIC_CAS(&recurse, &cur_recurse, 1))
+	if (recurse || !_HA_ATOMIC_CAS(&recurse, &cur_recurse, 1))
 		return;
 
 	list_for_each_entry(entry, &pools, list) {
@@ -407,7 +409,7 @@
 			HA_SPIN_UNLOCK(POOL_LOCK, &entry->lock);
 	}
 
-	HA_ATOMIC_STORE(&recurse, 0);
+	_HA_ATOMIC_STORE(&recurse, 0);
 }
 #endif