1 #ifndef _STATIC_UNORDERED_MAP_H_
2 #define _STATIC_UNORDERED_MAP_H_
4 #include "small_unordered_map.h"
7 * XXX(stephentu): allow custom allocator
// NOTE(review): this chunk appears to be a truncated extraction — the embedded
// original line numbers jump, so interleaved source lines are missing. Code is
// left byte-identical; comments only.
//
// Fixed-capacity unordered map: at most StaticSize entries, stored inline
// (no heap allocation), with an open-chaining hash table of TableSize buckets.
9 template <typename Key,
11 size_t StaticSize = SMALL_SIZE_MAP,
12 typename Hash = private_::myhash<Key>>
13 class static_unordered_map {
16 typedef T mapped_type;
// Public value_type matches std::unordered_map: key is const for users.
17 typedef std::pair<const key_type, mapped_type> value_type;
19 typedef T & reference;
20 typedef const T & const_reference;
// The std equivalent — presumably used when callers need to spill past
// StaticSize (cf. small_unordered_map.h) — TODO confirm against full file.
23 typedef std::unordered_map<Key, T, Hash> large_table_type;
// Internal storage uses a non-const key so buckets can be move-assigned.
24 typedef std::pair<key_type, mapped_type> bucket_value_type;
// Skip explicit destructor calls entirely when the pair is trivially
// destructible (see bucket::destroy below).
26 static const bool is_trivially_destructible =
27 private_::is_trivially_destructible<bucket_value_type>::value;
// Number of hash chains; derived from StaticSize by a project helper.
29 static const size_t TableSize = private_::TableSize(StaticSize);
30 static_assert(StaticSize >= 1, "XXX");
31 static_assert(TableSize >= 1, "XXX");
// Nested bucket: raw storage for one key/value pair plus chain metadata.
// The pair lives in `buf` and is constructed lazily via placement new, so
// unused slots pay no constructor/destructor cost.
// (NOTE(review): surrounding lines are missing from this extraction —
// accessor names and the struct header are not visible here.)
//
// Typed pointer into the raw storage.
34 inline ALWAYS_INLINE bucket_value_type *
37 return reinterpret_cast<bucket_value_type *>(&buf[0]);
// Const overload of the same accessor.
40 inline ALWAYS_INLINE const bucket_value_type *
43 return reinterpret_cast<const bucket_value_type *>(&buf[0]);
// Reference accessors (bodies on missing lines; presumably *ptr()).
46 inline ALWAYS_INLINE bucket_value_type &
52 inline ALWAYS_INLINE const bucket_value_type &
// In-place construct the pair, forwarding arbitrary ctor arguments.
// `hash` is presumably stored in a member `h` on a missing line (it is
// read as b->h elsewhere) — TODO confirm.
58 template <class... Args>
59 inline ALWAYS_INLINE void
60 construct(size_t hash, Args &&... args)
63 new (&ref()) bucket_value_type(std::forward<Args>(args)...);
// Destroy the pair; skipped entirely for trivially destructible pairs
// (the branch is resolved at compile time).
66 inline ALWAYS_INLINE void
69 if (!is_trivially_destructible)
70 ref().~bucket_value_type();
// Raw, unconstructed storage for the pair.
// NOTE(review): sized by sizeof(value_type) and no alignas is visible on
// this line — alignment is presumably handled elsewhere; verify in the
// full source.
75 char buf[sizeof(value_type)];
78 // iterators are not stable across mutation
// Forward iterator over the dense `elems` array (not the hash chains), so
// iteration order is insertion order. Templated on constness: instantiated
// below as both iterator and const_iterator.
// NOTE(review): std::iterator is deprecated in C++17 — replacing it with
// explicit member typedefs needs the (missing) full class body; flagged only.
79 template <typename BucketType, typename ValueType>
80 class iterator_ : public std::iterator<std::forward_iterator_tag, ValueType> {
81 friend class static_unordered_map;
// Default: singular iterator (null bucket pointer).
83 inline iterator_() : b(0) {}
// Converting ctor: allows iterator -> const_iterator conversion.
85 template <typename B, typename V>
86 inline iterator_(const iterator_<B, V> &other)
// operator*: reinterpret the bucket's pair (mutable key) as the public
// value_type (const key). Relies on identical layout of the two pair types.
93 return reinterpret_cast<ValueType &>(b->ref());
// operator-> via the same layout-compatibility assumption.
99 return reinterpret_cast<ValueType *>(b->ptr());
103 operator==(const iterator_ &o) const
109 operator!=(const iterator_ &o) const
111 return !operator==(o);
// Post-increment: copy, advance (on a missing line), return the copy.
124 iterator_ cur = *this;
// Private ctor used by begin()/end()/find().
130 inline iterator_(BucketType *b)
// Debug/statistics helper: length of one hash chain (body not visible).
140 chain_length(bucket *b)
152 typedef iterator_<bucket, value_type> iterator;
153 typedef iterator_<const bucket, const value_type> const_iterator;
// Default ctor: zero the chain-head table so all chains start empty.
155 static_unordered_map()
158 NDB_MEMSET(&table[0], 0, sizeof(table));
// Dtor: destroy the n live buckets (destroy() call on a missing line),
// then optionally record chain-length statistics.
161 ~static_unordered_map()
163 for (size_t i = 0; i < n; i++)
// When event counters are compiled in, report the maximum chain length
// observed in this table as a quality-of-hash metric.
165 #ifdef ENABLE_EVENT_COUNTERS
167 for (size_t i = 0; i < TableSize; i++) {
168 const size_t l = chain_length(table[i]);
169 ml = std::max(ml, l);
173 private_::evt_avg_max_unordered_map_chain_length.offer(ml);
// Copy ctor: start from an empty table, then (presumably) assignFrom(other)
// on a missing line — TODO confirm.
177 static_unordered_map(const static_unordered_map &other)
180 NDB_MEMSET(&table[0], 0, sizeof(table));
// Copy assignment, with an explicit self-assignment guard (assignFrom
// below documents that it does NOT check for self-assignment itself).
184 static_unordered_map &
185 operator=(const static_unordered_map &other)
188 if (unlikely(this == &other))
// Core lookup: hash the key, walk the chain for table[h % TableSize],
// and return the matching bucket (null if absent). `hash_value`, when
// non-null, presumably receives h so callers can reuse it for insertion.
196 find_bucket(const key_type &k, size_t *hash_value)
198 const size_t h = Hash()(k);
201 const size_t i = h % TableSize;
202 bucket *b = table[i];
// Only pre-filter on the stored hash when key equality is expensive;
// for cheap keys the extra compare costs more than it saves.
204 const bool check_hash = private_::is_eq_expensive<key_type>::value;
205 if ((!check_hash || b->h == h) && b->ref().first == k)
// Const overload forwards to the non-const version; the const_cast is
// safe because the non-const overload only mutates via its return value.
212 inline ALWAYS_INLINE const bucket *
213 find_bucket(const key_type &k, size_t *hash_value) const
215 return const_cast<static_unordered_map *>(this)->find_bucket(k, hash_value);
220 // XXX(stephentu): template away this stuff
// operator[] (lvalue key): return the existing value, or default-construct
// a new entry in the next free elems slot and link it into its chain
// (linking happens on missing lines). INVARIANT enforces the fixed
// capacity — inserting beyond StaticSize is a programming error.
223 operator[](const key_type &k)
226 bucket *b = find_bucket(k, &h)
228 return b->ref().second;
229 INVARIANT(n < StaticSize);
231 b->construct(h, k, mapped_type());
232 const size_t i = h % TableSize;
235 return b->ref().second;
// operator[] (rvalue key): identical except the key is moved into place.
239 operator[](key_type &&k)
242 bucket *b = find_bucket(k, &h);
244 return b->ref().second;
245 INVARIANT(n < StaticSize);
247 b->construct(h, std::move(k), mapped_type());
248 const size_t i = h % TableSize;
251 return b->ref().second;
// begin()/end(): iterate the dense elems array [0, n) in insertion order.
269 return iterator(&elems[0]);
275 return const_iterator(&elems[0]);
281 return iterator(&elems[n]);
284 inline const_iterator
287 return const_iterator(&elems[n]);
// find(): wrap find_bucket in an iterator; a null bucket presumably maps
// to end() on a missing line — TODO confirm.
291 find(const key_type &k)
293 bucket * const b = find_bucket(k, 0);
300 find(const key_type &k) const
302 const bucket * const b = find_bucket(k, 0);
304 return const_iterator(b);
// clear(): reset all chain heads, then destroy the n live buckets
// (the destroy() call is on a missing line).
313 NDB_MEMSET(&table[0], 0, sizeof(table));
314 for (size_t i = 0; i < n; i++)
// Distinguishes this fixed-size implementation from a heap-backed one
// (cf. large_table_type) for callers that care.
321 inline bool is_small_type() const { return true; }
325 // doesn't check for self assignment
// Deep-copy helper shared by copy ctor and operator= (both of which handle
// self-assignment before calling this). Copy-constructs each of that's
// buckets into the next free local slot and re-links it into the local
// hash chains, pushing at the chain head.
327 assignFrom(const static_unordered_map &that)
330 for (size_t i = 0; i < that.n; i++) {
331 bucket * const b = &elems[n++];
332 const bucket * const that_b = &that.elems[i];
333 b->construct(that_b->h, that_b->ref().first, that_b->ref().second);
334 const size_t idx = b->h % TableSize;
// Head insertion: new bucket points at the old chain head; table[idx]
// is presumably updated to b on a missing line — TODO confirm.
335 b->bnext = table[idx];
// Dense inline storage for up to StaticSize entries (insertion order),
// plus the chain-head table. Both live inline — no heap allocation.
341 bucket elems[StaticSize];
342 bucket *table[TableSize];
345 #endif /* _STATIC_UNORDERED_MAP_H_ */