9 #if !MASSTREE_COMPILER_HH
10 // assumes cache-aligned
// Prefetch the single cache line at `ptr` into all cache levels (prefetcht0).
// The cacheline_t typedef gives the inline asm a CACHELINE_SIZE-wide "m"
// memory operand, telling the compiler the whole line (not one byte) is read.
// Guarded so it is only defined when the Masstree compiler header has not
// already supplied an equivalent; PREFETCH_DEFINED signals its availability.
// NOTE(review): the function's opening/closing braces are not visible in this
// chunk (interior lines missing from the extraction) — confirm against the
// full file before editing.
11 static inline ALWAYS_INLINE void
12 prefetch(const void *ptr)
14 typedef struct { char x[CACHELINE_SIZE]; } cacheline_t;
15 asm volatile("prefetcht0 %0" : : "m" (*(const cacheline_t *) ptr));
17 #define PREFETCH_DEFINED 1
20 // assumes cache-aligned
// Prefetch the cache lines of *ptr beyond the first one: the loop starts at
// offset CACHELINE_SIZE (line 0 is presumably touched by the caller — verify)
// and stops at min(sizeof(*ptr), 4 * CACHELINE_SIZE), i.e. at most 4 lines
// total, so huge objects don't flood the prefetch queue.
// NOTE(review): the `template <typename T>` header, the braces, and the loop
// increment line are not visible in this chunk — confirm against the full
// file.
22 static inline ALWAYS_INLINE void
23 prefetch_object(const T *ptr)
25 for (unsigned i = CACHELINE_SIZE;
26 i < std::min(static_cast<unsigned>(sizeof(*ptr)),
27 static_cast<unsigned>(4 * CACHELINE_SIZE));
29 prefetch((const char *) ptr + i);
32 // prefetch an object resident in [ptr, ptr + n). doesn't assume cache aligned
33 static inline ALWAYS_INLINE void
34 prefetch_bytes(const void *p, size_t n)
36 const char *ptr = (const char *) p;
37 // round down to nearest cacheline, then prefetch
// `pend` caps the window at min(n, 4 * CACHELINE_SIZE) bytes past the
// ORIGINAL (unaligned) pointer; note it is computed BEFORE ptr is rounded
// down, so the rounded-down first line may begin before `p` itself.
38 const void * const pend =
39 std::min(ptr + n, ptr + 4 * CACHELINE_SIZE);
40 ptr = (const char *) util::round_down<uintptr_t, LG_CACHELINE_SIZE>((uintptr_t) ptr);
42 // manually unroll loop 3 times
// Each step below advances one cache line. The prefetch(...) calls and the
// `ptr < pend` guards that belong between these increments are not visible
// in this chunk (interior lines missing) — do not assume their exact shape;
// confirm against the full file.
43 ptr += CACHELINE_SIZE;
46 ptr += CACHELINE_SIZE;
49 ptr += CACHELINE_SIZE;
54 #endif /* _PREFETCH_H_ */