Summary: This adds `asm_volatile_memory`, following the same style as the existing `asm_volatile_pause`.
This also switches the two places in `RWSpinLock.h` that used raw inline assembly as a compiler memory barrier to call the new function instead.
Closes #260
Reviewed By: @yfeldblum
Differential Revision: D2283541
Pulled By: @sgolemon
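
As a usage sketch (not part of this diff), here is how a caller would use the new helper as a pure compiler barrier. The `publish`/`consume` functions and variable names are hypothetical; the helper body is copied from the hunk below, and the MSVC branch relies on `::_ReadWriteBarrier()` from `<intrin.h>`. Under the C++11 memory model this plain-variable handoff is formally a data race; it mirrors the pre-`std::atomic` style that `RWSpinLock.h` itself uses and is shown only to illustrate what the barrier prevents.

```cpp
#include <cstdint>

#if defined(_MSC_VER)
#include <intrin.h>  // declares ::_ReadWriteBarrier()
#endif

// Copy of the helper added in this diff, so the sketch is self-contained.
inline void asm_volatile_memory() {
#if defined(__clang__) || defined(__GNUC__)
  asm volatile("" : : : "memory");  // compiler barrier; emits no instructions
#elif defined(_MSC_VER)
  ::_ReadWriteBarrier();            // MSVC compiler-barrier intrinsic
#endif
}

uint32_t payload = 0;
uint32_t ready = 0;

// Hypothetical publisher: the barrier keeps the compiler from sinking the
// payload store below the flag store; x86 hardware already preserves the
// order of the two stores.
void publish(uint32_t value) {
  payload = value;
  asm_volatile_memory();
  ready = 1;
}

// Hypothetical consumer: the barrier keeps the compiler from hoisting the
// payload load above the flag check; x86 hardware already preserves the
// order of the two loads.
bool consume(uint32_t& out) {
  if (ready == 0) {
    return false;
  }
  asm_volatile_memory();
  out = payload;
  return true;
}
```
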
namespace folly {
+inline void asm_volatile_memory() {
+#if defined(__clang__) || defined(__GNUC__)
+ asm volatile("" : : : "memory");
+#elif defined(_MSC_VER)
+ ::_ReadWriteBarrier();
+#endif
+}
+
inline void asm_volatile_pause() {
#if defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))
::_mm_pause();
private: // Some x64-specific utilities for atomic access to ticket.
template<class T> static T load_acquire(T* addr) {
T t = *addr; // acquire barrier
- asm volatile("" : : : "memory");
+ asm_volatile_memory();
return t;
}
template<class T>
static void store_release(T* addr, T v) {
- asm volatile("" : : : "memory");
+ asm_volatile_memory();
*addr = v; // release barrier
}
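
For context on why a compiler-only barrier is enough in `RWSpinLock.h`: on x86/x64, ordinary aligned loads already provide acquire ordering and ordinary stores provide release ordering at the hardware level, so these helpers only need to stop the compiler from reordering around them. Below is a minimal, self-contained sketch of that pattern; the standalone helper copy and the `main` smoke test are illustrative, not folly code.

```cpp
#include <cstdint>
#include <cstdio>

#if defined(_MSC_VER)
#include <intrin.h>  // declares ::_ReadWriteBarrier()
#endif

// Copy of the helper added in this diff, so the sketch is self-contained.
inline void asm_volatile_memory() {
#if defined(__clang__) || defined(__GNUC__)
  asm volatile("" : : : "memory");
#elif defined(_MSC_VER)
  ::_ReadWriteBarrier();
#endif
}

// Mirrors the RWSpinLock.h helpers above: a plain access plus a compiler
// barrier gives acquire/release ordering on x86's strong memory model.
template <class T>
T load_acquire(T* addr) {
  T t = *addr;            // acquire at the ISA level on x86
  asm_volatile_memory();  // forbid the compiler from reordering past the load
  return t;
}

template <class T>
void store_release(T* addr, T v) {
  asm_volatile_memory();  // forbid the compiler from reordering past the store
  *addr = v;              // release at the ISA level on x86
}

int main() {
  uint16_t ticket = 0;
  store_release(&ticket, static_cast<uint16_t>(42));
  std::printf("ticket = %u\n", static_cast<unsigned>(load_acquire(&ticket)));
  return 0;
}
```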