//  Copyright (c) 2011-present, Facebook, Inc.  All rights reserved.
//  This source code is licensed under both the GPLv2 (found in the
//  COPYING file in the root directory) and Apache 2.0 License
//  (found in the LICENSE.Apache file in the root directory).
//
6 #ifndef ROCKSDB_LITE
7 #include "rocksdb/memtablerep.h"
8 
9 #include <unordered_set>
10 #include <set>
11 #include <memory>
12 #include <algorithm>
13 #include <type_traits>
14 
15 #include "db/memtable.h"
16 #include "memory/arena.h"
17 #include "memtable/stl_wrappers.h"
18 #include "port/port.h"
19 #include "util/mutexlock.h"
20 
21 namespace ROCKSDB_NAMESPACE {
22 namespace {
23 
24 using namespace stl_wrappers;
25 
// A MemTableRep backed by an unsorted std::vector of encoded entries.
// Entries are appended in insertion order and sorted lazily the first
// time an iterator needs ordered access (see Iterator::DoSort).
class VectorRep : public MemTableRep {
 public:
  VectorRep(const KeyComparator& compare, Allocator* allocator, size_t count);

  // Insert key into the collection. (The caller will pack key and value into a
  // single buffer and pass that in as the parameter to Insert)
  // REQUIRES: nothing that compares equal to key is currently in the
  // collection.
  void Insert(KeyHandle handle) override;

  // Returns true iff an entry that compares equal to key is in the collection.
  // NOTE: implemented with std::find over const char* elements, i.e. it
  // matches on buffer pointer identity, not key contents.
  bool Contains(const char* key) const override;

  // Marks this memtable immutable; Insert() asserts against it afterwards.
  void MarkReadOnly() override;

  // Approximate bytes used by the vector bookkeeping and pointer slots;
  // the encoded entries themselves are owned by the allocator/arena.
  size_t ApproximateMemoryUsage() override;

  // Seeks to k and invokes callback_func on successive entries until the
  // callback returns false or the entries are exhausted.
  void Get(const LookupKey& k, void* callback_args,
           bool (*callback_func)(void* arg, const char* entry)) override;

  ~VectorRep() override {}

  class Iterator : public MemTableRep::Iterator {
    class VectorRep* vrep_;  // non-null iff iterating an immutable memtable
    std::shared_ptr<std::vector<const char*>> bucket_;
    std::vector<const char*>::const_iterator mutable cit_;  // current position
    const KeyComparator& compare_;
    std::string tmp_;       // For passing to EncodeKey
    bool mutable sorted_;   // true once DoSort() has run for this iterator
    // Lazily sorts bucket_ (recording the fact in vrep_ when shared).
    void DoSort() const;
   public:
    explicit Iterator(class VectorRep* vrep,
      std::shared_ptr<std::vector<const char*>> bucket,
      const KeyComparator& compare);

    // Initialize an iterator over the specified collection.
    // The returned iterator is not valid.
    // explicit Iterator(const MemTableRep* collection);
    ~Iterator() override{};

    // Returns true iff the iterator is positioned at a valid node.
    bool Valid() const override;

    // Returns the key at the current position.
    // REQUIRES: Valid()
    const char* key() const override;

    // Advances to the next position.
    // REQUIRES: Valid()
    void Next() override;

    // Advances to the previous position.
    // REQUIRES: Valid()
    void Prev() override;

    // Advance to the first entry with a key >= target
    void Seek(const Slice& user_key, const char* memtable_key) override;

    // Advance to the first entry with a key <= target
    void SeekForPrev(const Slice& user_key, const char* memtable_key) override;

    // Position at the first entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToFirst() override;

    // Position at the last entry in collection.
    // Final state of iterator is Valid() iff collection is not empty.
    void SeekToLast() override;
  };

  // Return an iterator over the keys in this representation.
  MemTableRep::Iterator* GetIterator(Arena* arena) override;

 private:
  friend class Iterator;
  typedef std::vector<const char*> Bucket;
  std::shared_ptr<Bucket> bucket_;  // entries; shared with iterators
  mutable port::RWMutex rwlock_;    // guards bucket_/immutable_/sorted_
  bool immutable_;                  // set by MarkReadOnly()
  bool sorted_;                     // set once the shared bucket is sorted
  const KeyComparator& compare_;
};
108 
Insert(KeyHandle handle)109 void VectorRep::Insert(KeyHandle handle) {
110   auto* key = static_cast<char*>(handle);
111   WriteLock l(&rwlock_);
112   assert(!immutable_);
113   bucket_->push_back(key);
114 }
115 
116 // Returns true iff an entry that compares equal to key is in the collection.
Contains(const char * key) const117 bool VectorRep::Contains(const char* key) const {
118   ReadLock l(&rwlock_);
119   return std::find(bucket_->begin(), bucket_->end(), key) != bucket_->end();
120 }
121 
MarkReadOnly()122 void VectorRep::MarkReadOnly() {
123   WriteLock l(&rwlock_);
124   immutable_ = true;
125 }
126 
ApproximateMemoryUsage()127 size_t VectorRep::ApproximateMemoryUsage() {
128   return
129     sizeof(bucket_) + sizeof(*bucket_) +
130     bucket_->size() *
131     sizeof(
132       std::remove_reference<decltype(*bucket_)>::type::value_type
133     );
134 }
135 
VectorRep(const KeyComparator & compare,Allocator * allocator,size_t count)136 VectorRep::VectorRep(const KeyComparator& compare, Allocator* allocator,
137                      size_t count)
138     : MemTableRep(allocator),
139       bucket_(new Bucket()),
140       immutable_(false),
141       sorted_(false),
142       compare_(compare) {
143   bucket_.get()->reserve(count);
144 }
145 
Iterator(class VectorRep * vrep,std::shared_ptr<std::vector<const char * >> bucket,const KeyComparator & compare)146 VectorRep::Iterator::Iterator(class VectorRep* vrep,
147                    std::shared_ptr<std::vector<const char*>> bucket,
148                    const KeyComparator& compare)
149 : vrep_(vrep),
150   bucket_(bucket),
151   cit_(bucket_->end()),
152   compare_(compare),
153   sorted_(false) { }
154 
DoSort() const155 void VectorRep::Iterator::DoSort() const {
156   // vrep is non-null means that we are working on an immutable memtable
157   if (!sorted_ && vrep_ != nullptr) {
158     WriteLock l(&vrep_->rwlock_);
159     if (!vrep_->sorted_) {
160       std::sort(bucket_->begin(), bucket_->end(), Compare(compare_));
161       cit_ = bucket_->begin();
162       vrep_->sorted_ = true;
163     }
164     sorted_ = true;
165   }
166   if (!sorted_) {
167     std::sort(bucket_->begin(), bucket_->end(), Compare(compare_));
168     cit_ = bucket_->begin();
169     sorted_ = true;
170   }
171   assert(sorted_);
172   assert(vrep_ == nullptr || vrep_->sorted_);
173 }
174 
// Returns true iff the iterator is positioned at a valid node.
bool VectorRep::Iterator::Valid() const {
  // Ensure the bucket is sorted first: the initial sort repositions cit_.
  DoSort();
  return cit_ != bucket_->end();
}
180 
// Returns the key at the current position.
// REQUIRES: Valid()
const char* VectorRep::Iterator::key() const {
  // Valid() (a REQUIRES of this call) triggers DoSort(), hence the assert.
  assert(sorted_);
  return *cit_;
}
187 
188 // Advances to the next position.
189 // REQUIRES: Valid()
Next()190 void VectorRep::Iterator::Next() {
191   assert(sorted_);
192   if (cit_ == bucket_->end()) {
193     return;
194   }
195   ++cit_;
196 }
197 
198 // Advances to the previous position.
199 // REQUIRES: Valid()
Prev()200 void VectorRep::Iterator::Prev() {
201   assert(sorted_);
202   if (cit_ == bucket_->begin()) {
203     // If you try to go back from the first element, the iterator should be
204     // invalidated. So we set it to past-the-end. This means that you can
205     // treat the container circularly.
206     cit_ = bucket_->end();
207   } else {
208     --cit_;
209   }
210 }
211 
212 // Advance to the first entry with a key >= target
Seek(const Slice & user_key,const char * memtable_key)213 void VectorRep::Iterator::Seek(const Slice& user_key,
214                                const char* memtable_key) {
215   DoSort();
216   // Do binary search to find first value not less than the target
217   const char* encoded_key =
218       (memtable_key != nullptr) ? memtable_key : EncodeKey(&tmp_, user_key);
219   cit_ = std::equal_range(bucket_->begin(),
220                           bucket_->end(),
221                           encoded_key,
222                           [this] (const char* a, const char* b) {
223                             return compare_(a, b) < 0;
224                           }).first;
225 }
226 
// Advance to the first entry with a key <= target
// Not supported by VectorRep: reaching here is a programming error, so
// debug builds abort. Release builds leave the iterator unchanged.
void VectorRep::Iterator::SeekForPrev(const Slice& /*user_key*/,
                                      const char* /*memtable_key*/) {
  assert(false);
}
232 
233 // Position at the first entry in collection.
234 // Final state of iterator is Valid() iff collection is not empty.
SeekToFirst()235 void VectorRep::Iterator::SeekToFirst() {
236   DoSort();
237   cit_ = bucket_->begin();
238 }
239 
240 // Position at the last entry in collection.
241 // Final state of iterator is Valid() iff collection is not empty.
SeekToLast()242 void VectorRep::Iterator::SeekToLast() {
243   DoSort();
244   cit_ = bucket_->end();
245   if (bucket_->size() != 0) {
246     --cit_;
247   }
248 }
249 
Get(const LookupKey & k,void * callback_args,bool (* callback_func)(void * arg,const char * entry))250 void VectorRep::Get(const LookupKey& k, void* callback_args,
251                     bool (*callback_func)(void* arg, const char* entry)) {
252   rwlock_.ReadLock();
253   VectorRep* vector_rep;
254   std::shared_ptr<Bucket> bucket;
255   if (immutable_) {
256     vector_rep = this;
257   } else {
258     vector_rep = nullptr;
259     bucket.reset(new Bucket(*bucket_));  // make a copy
260   }
261   VectorRep::Iterator iter(vector_rep, immutable_ ? bucket_ : bucket, compare_);
262   rwlock_.ReadUnlock();
263 
264   for (iter.Seek(k.user_key(), k.memtable_key().data());
265        iter.Valid() && callback_func(callback_args, iter.key()); iter.Next()) {
266   }
267 }
268 
GetIterator(Arena * arena)269 MemTableRep::Iterator* VectorRep::GetIterator(Arena* arena) {
270   char* mem = nullptr;
271   if (arena != nullptr) {
272     mem = arena->AllocateAligned(sizeof(Iterator));
273   }
274   ReadLock l(&rwlock_);
275   // Do not sort here. The sorting would be done the first time
276   // a Seek is performed on the iterator.
277   if (immutable_) {
278     if (arena == nullptr) {
279       return new Iterator(this, bucket_, compare_);
280     } else {
281       return new (mem) Iterator(this, bucket_, compare_);
282     }
283   } else {
284     std::shared_ptr<Bucket> tmp;
285     tmp.reset(new Bucket(*bucket_)); // make a copy
286     if (arena == nullptr) {
287       return new Iterator(nullptr, tmp, compare_);
288     } else {
289       return new (mem) Iterator(nullptr, tmp, compare_);
290     }
291   }
292 }
293 } // anon namespace
294 
// Factory entry point: builds a VectorRep pre-sized to count_ entries.
// The prefix extractor and logger parameters are unused by this rep.
MemTableRep* VectorRepFactory::CreateMemTableRep(
    const MemTableRep::KeyComparator& compare, Allocator* allocator,
    const SliceTransform*, Logger* /*logger*/) {
  return new VectorRep(compare, allocator, count_);
}
300 }  // namespace ROCKSDB_NAMESPACE
301 #endif  // ROCKSDB_LITE
302