/*
 * WriteMap.h
 *
 * This source file is part of the FoundationDB open source project
 *
 * Copyright 2013-2018 Apple Inc. and the FoundationDB project authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef FDBCLIENT_WRITEMAP_H
#define FDBCLIENT_WRITEMAP_H
#pragma once

#include "fdbclient/FDBTypes.h"
#include "fdbclient/VersionedMap.h"
#include "fdbclient/SnapshotCache.h"
#include "fdbclient/Atomic.h"

struct RYWMutation {
	Optional<ValueRef> value;
	enum MutationRef::Type type;

	RYWMutation(Optional<ValueRef> const& entry, MutationRef::Type type) : value(entry), type(type) {}
	RYWMutation() : value(), type(MutationRef::NoOp) {}

	bool operator == (const RYWMutation& r) const {
		return value == r.value && type == r.type;
	}
};

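// OperationStack stores the stack of mutations that have been applied to a single key. The
// common case of a single mutation lives inline in singletonOperation; a std::vector is only
// allocated once a second mutation that cannot be coalesced with the first is pushed (see
// WriteMap::coalesceOver below).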
class OperationStack {
	private:
		RYWMutation singletonOperation;
		Optional<std::vector<RYWMutation>> optionalOperations;
		bool hasVector() const { return optionalOperations.present(); }
		bool defaultConstructed;
	public:
		OperationStack () { defaultConstructed = true; } // Don't use this!
		explicit OperationStack (RYWMutation initialEntry) { defaultConstructed = false; singletonOperation = initialEntry; }
		void reset(RYWMutation initialEntry) { defaultConstructed = false; singletonOperation = initialEntry; optionalOperations = Optional<std::vector<RYWMutation>>(); }
		void poppush(RYWMutation entry) { if(hasVector()) { optionalOperations.get().pop_back(); optionalOperations.get().push_back(entry); } else singletonOperation = entry; }
		void push(RYWMutation entry) {
			if(defaultConstructed) {
				singletonOperation = entry;
				defaultConstructed = false;
			}
			else if(hasVector())
				optionalOperations.get().push_back(entry);
			else {
				optionalOperations = std::vector<RYWMutation>();
				optionalOperations.get().push_back(entry);
			}
		}
		bool isDependent() const {
			if( !size() )
				return false;
			return singletonOperation.type != MutationRef::SetValue && singletonOperation.type != MutationRef::ClearRange && singletonOperation.type != MutationRef::SetVersionstampedValue && singletonOperation.type != MutationRef::SetVersionstampedKey;
		}
		const RYWMutation& top() const { return hasVector() ? optionalOperations.get().back() : singletonOperation; }
		RYWMutation& operator[] (int n) { return (n==0) ? singletonOperation : optionalOperations.get()[n-1]; }

		const RYWMutation& at(int n) const { return (n==0) ? singletonOperation : optionalOperations.get()[n-1]; }

		int size() const { return defaultConstructed ? 0 : hasVector() ? optionalOperations.get().size() + 1 : 1; }

		bool operator == (const OperationStack& r) const {
			if (size() != r.size())
				return false;

			if (size() == 0)
				return true;

			if (singletonOperation != r.singletonOperation)
				return false;

			if (size() == 1)
				return true;

			for (int i = 0; i < optionalOperations.get().size(); i++) {
				if (optionalOperations.get()[i] != r.optionalOperations.get()[i])
					return false;
			}

			return true;
		}
};

struct WriteMapEntry {
	KeyRef key;
	OperationStack stack;
	bool following_keys_cleared;
	bool following_keys_conflict;
	bool is_conflict;
	bool following_keys_unreadable;
	bool is_unreadable;

	WriteMapEntry( KeyRef const& key, OperationStack && stack, bool following_keys_cleared, bool following_keys_conflict, bool is_conflict, bool following_keys_unreadable, bool is_unreadable ) : key(key), stack(std::move(stack)), following_keys_cleared(following_keys_cleared), following_keys_conflict(following_keys_conflict), is_conflict(is_conflict), following_keys_unreadable(following_keys_unreadable), is_unreadable(is_unreadable) {}

	std::string toString() const { return printable(key); }
};

inline bool operator < ( const WriteMapEntry& lhs, const WriteMapEntry& rhs ) { return lhs.key < rhs.key; }
inline bool operator < ( const WriteMapEntry& lhs, const StringRef& rhs ) { return lhs.key < rhs; }
inline bool operator < ( const StringRef& lhs, const WriteMapEntry& rhs ) { return lhs < rhs.key; }
inline bool operator < ( const WriteMapEntry& lhs, const ExtStringRef& rhs ) { return rhs.cmp(lhs.key)>0; }
inline bool operator < ( const ExtStringRef& lhs, const WriteMapEntry& rhs ) { return lhs.cmp(rhs.key)<0; }

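// WriteMap records the writes, clears, conflict ranges, and unreadable ranges made by a
// transaction, stored as WriteMapEntry nodes in a persistent tree (PTree) over the whole
// keyspace. Each entry carries the operation stack for its own key plus following_keys_* flags
// describing the range up to the next entry; WriteMap::iterator walks these as non-overlapping
// segments.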
class WriteMap {
private:
	typedef PTreeImpl::PTree< WriteMapEntry > PTreeT;
	typedef Reference<PTreeT> Tree;

public:
	explicit WriteMap(Arena* arena) : arena(arena), ver(-1), scratch_iterator(this), writeMapEmpty(true)  {
		PTreeImpl::insert( writes, ver, WriteMapEntry( allKeys.begin, OperationStack(), false, false, false, false, false ) );
		PTreeImpl::insert( writes, ver, WriteMapEntry( allKeys.end, OperationStack(), false, false, false, false, false ) );
		PTreeImpl::insert( writes, ver, WriteMapEntry( afterAllKeys, OperationStack(), false, false, false, false, false ) );
	}

	WriteMap(WriteMap&& r) BOOST_NOEXCEPT : writeMapEmpty(r.writeMapEmpty), writes(std::move(r.writes)), ver(r.ver), scratch_iterator(std::move(r.scratch_iterator)), arena(r.arena) {}
	WriteMap& operator=(WriteMap&& r) BOOST_NOEXCEPT { writeMapEmpty = r.writeMapEmpty; writes = std::move(r.writes); ver = r.ver; scratch_iterator = std::move(r.scratch_iterator); arena = r.arena; return *this; }

	// A write with addConflict=false on top of an existing write that has a conflict range will not remove that conflict.
	void mutate( KeyRef key, MutationRef::Type operation, ValueRef param, bool addConflict ) {
		writeMapEmpty = false;
		auto& it = scratch_iterator;
		it.reset(writes, ver);
		it.skip( key );

		bool is_cleared = it.entry().following_keys_cleared;
		bool following_conflict = it.entry().following_keys_conflict;
		bool is_conflict = addConflict || it.is_conflict_range();
		bool following_unreadable = it.entry().following_keys_unreadable;
		bool is_unreadable = it.is_unreadable() || operation == MutationRef::SetVersionstampedValue || operation == MutationRef::SetVersionstampedKey;
		bool is_dependent = operation != MutationRef::SetValue && operation != MutationRef::SetVersionstampedValue && operation != MutationRef::SetVersionstampedKey;

		if (it.entry().key != key) {
			if( it.is_cleared_range() && is_dependent ) {
				it.tree.clear();
				OperationStack op( RYWMutation( Optional<StringRef>(), MutationRef::SetValue ) );
				coalesceOver(op, RYWMutation(param, operation), *arena);
				PTreeImpl::insert( writes, ver, WriteMapEntry( key, std::move(op), true, following_conflict, is_conflict, following_unreadable, is_unreadable ) );
			} else {
				it.tree.clear();
				PTreeImpl::insert( writes, ver, WriteMapEntry( key, OperationStack( RYWMutation( param, operation ) ), is_cleared, following_conflict, is_conflict, following_unreadable, is_unreadable ) );
			}
		} else {
			if( !it.is_unreadable() && operation == MutationRef::SetValue ) {
				it.tree.clear();
				PTreeImpl::remove( writes, ver, key );
				PTreeImpl::insert( writes, ver, WriteMapEntry( key, OperationStack( RYWMutation( param, operation ) ), is_cleared, following_conflict, is_conflict, following_unreadable, is_unreadable ) );
			} else {
				WriteMapEntry e( it.entry() );
				e.is_conflict = is_conflict;
				e.is_unreadable = is_unreadable;
				if (e.stack.size() == 0 && it.is_cleared_range() && is_dependent)  {
					e.stack.push(RYWMutation(Optional<StringRef>(), MutationRef::SetValue));
					coalesceOver(e.stack, RYWMutation(param, operation), *arena);
				} else if( !is_unreadable && e.stack.size() > 0 )
					coalesceOver( e.stack, RYWMutation( param, operation ), *arena );
				else
					e.stack.push( RYWMutation( param, operation ) );

				it.tree.clear();
				PTreeImpl::remove( writes, ver, e.key ); // FIXME: Make PTreeImpl::insert do this automatically (see also VersionedMap.h FIXME)
				PTreeImpl::insert( writes, ver, std::move(e) );
			}
		}
	}
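
	// Illustrative sketch (hypothetical key/value names; only the call pattern comes from this
	// class): layering a non-conflicting write over a conflicting one keeps the conflict, per the
	// note above mutate().
	//
	//   WriteMap wm( &arena );
	//   wm.mutate( k, MutationRef::SetValue, v, true );        // independent write, adds a conflict
	//   wm.mutate( k, MutationRef::AddValue, delta, false );   // coalesced onto the set; k stays conflicting
	//                                                          // even though addConflict is false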

	void clear( KeyRangeRef keys, bool addConflict ) {
		writeMapEmpty = false;
		if( !addConflict ) {
			clearNoConflict( keys );
			return;
		}

		auto& it = scratch_iterator;
		it.reset(writes, ver);
		it.skip( keys.begin );

		bool insert_begin = !it.is_cleared_range() || !it.is_conflict_range() || it.is_unreadable();

		if(it.endKey() == keys.end) {
			++it;
		} else if(it.endKey() < keys.end) {
			it.skip( keys.end );
		}

		bool insert_end = ( it.is_unmodified_range() || !it.is_conflict_range() || it.is_unreadable() ) && ( !it.keyAtBegin() || it.beginKey() != keys.end );
		bool end_coalesce_clear = it.is_cleared_range() && it.beginKey() == keys.end && it.is_conflict_range() && !it.is_unreadable();
		bool end_conflict = it.is_conflict_range();
		bool end_cleared = it.is_cleared_range();
		bool end_unreadable = it.is_unreadable();

		it.tree.clear();

		PTreeImpl::remove( writes, ver, ExtStringRef(keys.begin, !insert_begin ? 1 : 0), ExtStringRef(keys.end, end_coalesce_clear ? 1 : 0) );

		if (insert_begin)
			PTreeImpl::insert( writes, ver, WriteMapEntry( keys.begin, OperationStack(), true, true, true, false, false ) );

		if (insert_end)
			PTreeImpl::insert( writes, ver, WriteMapEntry( keys.end, OperationStack(), end_cleared, end_conflict, end_conflict, end_unreadable, end_unreadable ) );
	}

	void addUnmodifiedAndUnreadableRange( KeyRangeRef keys ) {
		auto& it = scratch_iterator;
		it.reset(writes, ver);
		it.skip( keys.begin );

		bool insert_begin = !it.is_unmodified_range() || it.is_conflict_range() || !it.is_unreadable();

		if(it.endKey() == keys.end) {
			++it;
		} else if(it.endKey() < keys.end) {
			it.skip( keys.end );
		}

		bool insert_end = ( it.is_cleared_range() || it.is_conflict_range() || !it.is_unreadable() ) && ( !it.keyAtBegin() || it.beginKey() != keys.end );
		bool end_coalesce_unmodified = it.is_unmodified_range() && it.beginKey() == keys.end && !it.is_conflict_range() && it.is_unreadable();
		bool end_conflict = it.is_conflict_range();
		bool end_cleared = it.is_cleared_range();
		bool end_unreadable = it.is_unreadable();

		it.tree.clear();

		PTreeImpl::remove( writes, ver, ExtStringRef(keys.begin, !insert_begin ? 1 : 0), ExtStringRef(keys.end, end_coalesce_unmodified ? 1 : 0) );

		if (insert_begin)
			PTreeImpl::insert( writes, ver, WriteMapEntry( keys.begin, OperationStack(), false, false, false, true, true ) );

		if (insert_end)
			PTreeImpl::insert( writes, ver, WriteMapEntry( keys.end, OperationStack(), end_cleared, end_conflict, end_conflict, end_unreadable, end_unreadable ) );
	}

	void addConflictRange( KeyRangeRef keys ) {
		writeMapEmpty = false;
		auto& it = scratch_iterator;
		it.reset(writes, ver);
		it.skip( keys.begin );

		std::vector<ExtStringRef> removals;
		std::vector<WriteMapEntry> insertions;

		if( !it.entry().following_keys_conflict || !it.entry().is_conflict ) {
			if( it.keyAtBegin() && it.beginKey() == keys.begin ) {
				removals.push_back( keys.begin );
			}
			insertions.push_back( WriteMapEntry( keys.begin, it.is_operation() ? OperationStack( it.op() ) : OperationStack(), it.entry().following_keys_cleared, true, true, it.entry().following_keys_unreadable, it.entry().is_unreadable ) );
		}

		while ( it.endKey() < keys.end ) {
			++it;
			if (it.keyAtBegin() && (!it.entry().following_keys_conflict || !it.entry().is_conflict) ) {
				WriteMapEntry e( it.entry() );
				e.following_keys_conflict = true;
				e.is_conflict = true;
				removals.push_back( e.key );
				insertions.push_back( std::move(e) );
			}
		}

		ASSERT( it.beginKey() != keys.end );
		if( !it.entry().following_keys_conflict || !it.entry().is_conflict ) {
			bool isCleared = it.entry().following_keys_cleared;
			bool isUnreadable = it.entry().is_unreadable;
			bool followingUnreadable = it.entry().following_keys_unreadable;
			++it;

			if ( !it.keyAtBegin() || it.beginKey() != keys.end ) {
				insertions.push_back(WriteMapEntry(keys.end, OperationStack(), isCleared, false, false, followingUnreadable, isUnreadable));
			}
		}

		it.tree.clear();

		//SOMEDAY: optimize this code by having a PTree removal/insertion that takes and returns an iterator
		for( int i = 0; i < removals.size(); i++ ) {
			PTreeImpl::remove( writes, ver, removals[i] ); // FIXME: Make PTreeImpl::insert do this automatically (see also VersionedMap.h FIXME)
		}

		for( int i = 0; i < insertions.size(); i++ ) {
			PTreeImpl::insert( writes, ver, std::move(insertions[i]) );
		}
	}

	struct iterator {
		// Iterates over three types of segments: unmodified ranges, cleared ranges, and modified keys.
		// Modified keys may be dependent (need to be collapsed with a snapshot value) or independent (value is known regardless of the snapshot value).
		// Every key will belong to exactly one segment.  The first segment begins at "" and the last segment ends at \xff\xff.
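		//
		// Illustrative walk over every segment (the same pattern dump() uses below); a sketch,
		// not additional API:
		//
		//   WriteMap::iterator it( &wm );
		//   it.skip( allKeys.begin );
		//   while( it.beginKey() < allKeys.end ) {
		//       switch (it.type()) {
		//       case WriteMap::iterator::UNMODIFIED_RANGE:  /* nothing written in [beginKey, endKey) */  break;
		//       case WriteMap::iterator::CLEARED_RANGE:     /* the range was cleared */                  break;
		//       case WriteMap::iterator::INDEPENDENT_WRITE: /* value known from it.op() alone */         break;
		//       case WriteMap::iterator::DEPENDENT_WRITE:   /* needs coalesceUnder() with a snapshot */  break;
		//       }
		//       ++it;
		//   }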

		explicit iterator( WriteMap* map ) : tree(map->writes), at( map->ver ), offset(false) { ++map->ver; }
			// Creates an iterator which is conceptually before the beginning of map (you may essentially only call skip() or ++ on it)
			// This iterator also represents a snapshot (will be unaffected by future writes)

		enum SEGMENT_TYPE { UNMODIFIED_RANGE, CLEARED_RANGE, INDEPENDENT_WRITE, DEPENDENT_WRITE };

		SEGMENT_TYPE type() {
			if (offset)
				return entry().following_keys_cleared ? CLEARED_RANGE : UNMODIFIED_RANGE;
			else
				return entry().stack.isDependent() ? DEPENDENT_WRITE : INDEPENDENT_WRITE;
		}
		bool is_cleared_range() { return offset && entry().following_keys_cleared; }
		bool is_unmodified_range() { return offset && !entry().following_keys_cleared; }
		bool is_operation() { return !offset; }
		bool is_conflict_range() { return offset ? entry().following_keys_conflict : entry().is_conflict; }
		bool is_unreadable() { return offset ? entry().following_keys_unreadable : entry().is_unreadable; }

		bool is_independent() { return entry().following_keys_cleared || !entry().stack.isDependent(); }  // Defined if is_operation()

		ExtStringRef beginKey() { return ExtStringRef( entry().key, offset && entry().stack.size() ); }
		ExtStringRef endKey() { return offset ? nextEntry().key : ExtStringRef( entry().key, 1 ); }

		OperationStack const& op() { return entry().stack; }  // Only if is_operation()

		iterator& operator++() {
			if (!offset && !equalsKeyAfter( entry().key, nextEntry().key )) {
				offset = true;
			} else {
				beginLen = endLen;
				finger.resize( beginLen );
				endLen = PTreeImpl::halfNext( at, finger );
				offset = !entry().stack.size();
			}
			return *this;
		}
		iterator& operator--() {
			if (offset && entry().stack.size() ) {
				offset = false;
			} else {
				endLen = beginLen;
				finger.resize(endLen);
				beginLen = PTreeImpl::halfPrevious( at, finger );
				offset = !entry().stack.size() || !equalsKeyAfter( entry().key, nextEntry().key );
			}
			return *this;
		}
		bool operator == ( const iterator& r ) const { return offset == r.offset && beginLen == r.beginLen && finger[beginLen-1] == r.finger[beginLen-1]; }

		void skip( KeyRef key ) {      // Changes *this to the segment containing key (so that beginKey()<=key && key < endKey())
			finger.clear();

			if( key == allKeys.end )
				PTreeImpl::last(tree, at, finger);
			else
				PTreeImpl::upper_bound( tree, at, key, finger );
			endLen = finger.size();
			beginLen = PTreeImpl::halfPrevious( at, finger );

			offset = !entry().stack.size() || (entry().key != key);
		}
	private:
		friend class WriteMap;
		void reset( Tree const& tree, Version ver ) { this->tree = tree; this->at = ver; this->finger.clear(); beginLen=endLen=0; offset = false; }

		WriteMapEntry const& entry() { return finger[beginLen-1]->data; }
		WriteMapEntry const& nextEntry() { return finger[endLen-1]->data; }

		bool keyAtBegin() { return !offset || !entry().stack.size(); }

		Tree tree;
		Version at;
		int beginLen, endLen;
		vector< PTreeT const* > finger;
		bool offset;  // false-> the operation stack at entry(); true-> the following cleared or unmodified range
	};

	bool empty() const { return writeMapEmpty; }

	static RYWMutation coalesce(RYWMutation existingEntry, RYWMutation newEntry, Arena& arena) {
		ASSERT(newEntry.value.present());

		if (newEntry.type == MutationRef::SetValue)
			return newEntry;
		else if (newEntry.type == MutationRef::AddValue) {
			switch(existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doLittleEndianAdd(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::AddValue:
					return RYWMutation(doLittleEndianAdd(existingEntry.value, newEntry.value.get(), arena), MutationRef::AddValue);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::CompareAndClear) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				if (doCompareAndClear(existingEntry.value, newEntry.value.get(), arena).present()) {
					return existingEntry;
				} else {
					return RYWMutation(Optional<ValueRef>(), MutationRef::SetValue);
				}
			default:
				throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::AppendIfFits) {
			switch(existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doAppendIfFits(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::AppendIfFits:
					return RYWMutation(doAppendIfFits(existingEntry.value, newEntry.value.get(), arena), MutationRef::AppendIfFits);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::And) {
			switch(existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doAnd(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::And:
					return RYWMutation(doAnd(existingEntry.value, newEntry.value.get(), arena), MutationRef::And);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::Or) {
			switch(existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doOr(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::Or:
					return RYWMutation(doOr(existingEntry.value, newEntry.value.get(), arena), MutationRef::Or);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::Xor) {
			switch(existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doXor(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::Xor:
					return RYWMutation(doXor(existingEntry.value, newEntry.value.get(), arena), MutationRef::Xor);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::Max) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				return RYWMutation(doMax(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
			case MutationRef::Max:
				return RYWMutation(doMax(existingEntry.value, newEntry.value.get(), arena), MutationRef::Max);
			default:
				throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::Min) {
			switch (existingEntry.type) {
				case MutationRef::SetValue:
					return RYWMutation(doMin(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
				case MutationRef::Min:
					return RYWMutation(doMin(existingEntry.value, newEntry.value.get(), arena), MutationRef::Min);
				default:
					throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::ByteMin) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				return RYWMutation(doByteMin(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
			case MutationRef::ByteMin:
				return RYWMutation(doByteMin(existingEntry.value, newEntry.value.get(), arena), MutationRef::ByteMin);
			default:
				throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::ByteMax) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				return RYWMutation(doByteMax(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
			case MutationRef::ByteMax:
				return RYWMutation(doByteMax(existingEntry.value, newEntry.value.get(), arena), MutationRef::ByteMax);
			default:
				throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::MinV2) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				return RYWMutation(doMinV2(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
			case MutationRef::MinV2:
				return RYWMutation(doMinV2(existingEntry.value, newEntry.value.get(), arena), MutationRef::MinV2);
			default:
				throw operation_failed();
			}
		} else if (newEntry.type == MutationRef::AndV2) {
			switch (existingEntry.type) {
			case MutationRef::SetValue:
				return RYWMutation(doAndV2(existingEntry.value, newEntry.value.get(), arena), MutationRef::SetValue);
			case MutationRef::AndV2:
				return RYWMutation(doAndV2(existingEntry.value, newEntry.value.get(), arena), MutationRef::AndV2);
			default:
				throw operation_failed();
			}
		} else
			throw operation_failed();
	}
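	// Worked example (illustrative; set/add/arena are hypothetical locals, not part of this API):
	//
	//   RYWMutation set( someValue, MutationRef::SetValue );
	//   RYWMutation add( someDelta, MutationRef::AddValue );
	//   RYWMutation collapsed = WriteMap::coalesce( set, add, arena );  // SetValue of the little-endian sum
	//
	// Coalescing two AddValues instead yields a combined AddValue that still depends on the
	// snapshot value, and mixing incompatible types (e.g. an And over an AddValue) throws
	// operation_failed().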

	static void coalesceOver(OperationStack& stack, RYWMutation newEntry, Arena& arena) {
		RYWMutation existingEntry = stack.top();
		if (existingEntry.type == newEntry.type && newEntry.type != MutationRef::CompareAndClear) {
			if (isNonAssociativeOp(existingEntry.type) && existingEntry.value.present() && existingEntry.value.get().size() != newEntry.value.get().size()) {
				stack.push(newEntry);
			}
			else {
				stack.poppush(coalesce(existingEntry, newEntry, arena));
			}
		}
		else {
			if (isAtomicOp(newEntry.type) && isAtomicOp(existingEntry.type)) {
				stack.push(newEntry);
			}
			else {
				stack.poppush(coalesce(existingEntry, newEntry, arena));
			}
		}
	}

	static RYWMutation coalesceUnder(OperationStack const& stack, Optional<ValueRef> const& value, Arena& arena) {
		if( !stack.isDependent() && stack.size() == 1 )
			return stack.at(0);

		RYWMutation currentEntry = RYWMutation( value, MutationRef::SetValue);
		for(int i = 0; i < stack.size(); ++i) {
			currentEntry = coalesce(currentEntry, stack.at(i), arena);
		}

		return currentEntry;
	}
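
	// coalesceOver folds a new mutation onto the top of a key's stack at write time; coalesceUnder
	// collapses the whole stack once the underlying snapshot value is known at read time.
	// Sketch (illustrative; snapshotValue is hypothetical):
	//
	//   RYWMutation collapsed = WriteMap::coalesceUnder( it.op(), snapshotValue, arena );
	//   // collapsed.value is what a read of this key returns within the transaction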

private:
	friend class ReadYourWritesTransaction;
	Arena* arena;
	bool writeMapEmpty;
	Tree writes;
	Version ver;  // an internal version number for the tree - no connection to database versions!  Currently this is incremented after reads, so that consecutive writes have the same version and those separated by reads have different versions.
	iterator scratch_iterator;   // Avoid unnecessary memory allocation in write operations

	void dump() {
		iterator it( this );
		it.skip(allKeys.begin);
		while( it.beginKey() < allKeys.end ) {
			TraceEvent("WriteMapDump").detail("Begin", it.beginKey().toStandaloneStringRef())
				.detail("End", it.endKey())
				.detail("Cleared", it.is_cleared_range())
				.detail("Conflicted", it.is_conflict_range())
				.detail("Operation", it.is_operation())
				.detail("Unmodified", it.is_unmodified_range())
				.detail("Independent", it.is_operation() && it.is_independent())
				.detail("StackSize", it.is_operation() ? it.op().size() : 0);
			++it;
		}
	}

	// SOMEDAY: clearNoConflict replaces cleared sets with two map entries for every one item cleared
	void clearNoConflict( KeyRangeRef keys ) {
		auto& it = scratch_iterator;
		it.reset(writes, ver);

		// Find all write conflict ranges within the cleared range
		it.skip( keys.begin );

		bool insert_begin = !it.is_cleared_range() || it.is_unreadable();

		bool lastConflicted = it.is_conflict_range();

		bool conflicted = lastConflicted;
		std::vector<ExtStringRef> conflict_ranges;

		if( insert_begin ) {
			conflict_ranges.push_back( keys.begin );
		} else {
			conflicted = !conflicted;
		}

		while( it.endKey() < keys.end ) {
			++it;
			if ( lastConflicted != it.is_conflict_range() ) {
				conflict_ranges.push_back( it.beginKey() );
				lastConflicted = it.is_conflict_range();
			}
		}

		if( it.endKey() == keys.end )
			++it;

		ASSERT( it.beginKey() <= keys.end && keys.end < it.endKey() );

		bool insert_end = ( ( it.is_unmodified_range() || it.is_unreadable() ) && ( !it.keyAtBegin() || it.beginKey() != keys.end ) ) || ( it.entry().is_conflict && !it.entry().following_keys_conflict && it.beginKey() == keys.end && !it.keyAtBegin() );
		bool end_cleared = it.is_cleared_range();
		bool end_coalesce_clear = it.is_cleared_range() && it.beginKey() == keys.end && it.is_conflict_range()==lastConflicted && !it.is_unreadable();
		bool end_conflict = it.is_conflict_range();
		bool end_unreadable = it.is_unreadable();

		TEST( it.is_conflict_range() != lastConflicted );

		it.tree.clear();

		PTreeImpl::remove( writes, ver, ExtStringRef(keys.begin, !insert_begin ? 1 : 0), ExtStringRef(keys.end, end_coalesce_clear ? 1 : 0) );

		for( int i = 0; i < conflict_ranges.size(); i++ ) {
			PTreeImpl::insert( writes, ver, WriteMapEntry( conflict_ranges[i].toArenaOrRef(*arena), OperationStack(), true, conflicted, conflicted, false, false ) );
			conflicted = !conflicted;
		}

		ASSERT( conflicted != lastConflicted );

		if (insert_end)
			PTreeImpl::insert( writes, ver, WriteMapEntry( keys.end, OperationStack(), end_cleared, end_conflict, end_conflict, end_unreadable, end_unreadable ) );
	}
};

	/*

		for write in writes:   # write.type in [ 'none', 'clear', 'independent', 'dependent' ]
			for read in reads[ write.begin : write.end ]:   # read.type in [ 'unknown', 'empty', 'value' ]
				if write.type == "none":
					yield read
				elif write.type == "clear":
					yield empty()
				elif write.type == "independent":
					yield value( write )
				else:  # Dependent write
					if read.type == "unknown":
						yield read
					else:
						yield value( collapse( read, write ) )

	*/
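
	/*
		A concrete trace of the collapse above (illustrative values):

		  - write.type == "clear", read == value("x")          ->  yield empty()
		  - write.type == "independent" (SetValue "y")         ->  yield value("y"); the snapshot is ignored
		  - write.type == "dependent" (AddValue 1), read known ->  yield value( coalesceUnder(stack, read) )
		  - write.type == "dependent", read.type == "unknown"  ->  yield the read as-is; it is collapsed
		                                                           later, once the snapshot value arrives
	*/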

#endif