1 /////////////////////////////////////////////////////////////////////////////
2 // Copyright (c) Electronic Arts Inc. All rights reserved.
3 /////////////////////////////////////////////////////////////////////////////
4
5
6 #include "EASTLBenchmark.h"
7 #include "EASTLTest.h"
8 #include <EAStdC/EAStopwatch.h>
9 #include <EASTL/algorithm.h>
10 #include <EASTL/bonus/tuple_vector.h>
11 #include <EASTL/sort.h>
12
13 #ifdef _MSC_VER
14 #pragma warning(push, 0)
15 #pragma warning(disable: 4350)
16 #endif
17 #include <algorithm>
18 #include <vector>
19 #include <stdio.h>
20 #include <stdlib.h>
21 #ifdef _MSC_VER
22 #pragma warning(pop)
23 #endif
24
25
26 using namespace EA;
27
28
// Plain 64-bit element containers benchmarked side by side:
// std::vector (array-of-values) vs. eastl::tuple_vector.
typedef std::vector<uint64_t> StdVectorUint64;
typedef eastl::tuple_vector<uint64_t> EaTupleVectorUint64;

// Filler member used to space the uint64_t elements apart, so the benchmarks
// can contrast access to sparse data in an array-of-structs layout
// (std::vector of tuples) versus the struct-of-arrays layout that
// eastl::tuple_vector provides.
// NOTE(review): 56 bytes of padding + an 8-byte uint64_t presumably totals one
// 64-byte cache line per padded element -- confirm that is the intent.
struct PaddingStruct
{
	char padding[56] = { 0 };
};
static const PaddingStruct DefaultPadding; // Shared prototype copied into each padded tuple.
typedef eastl::tuple<uint64_t, PaddingStruct> PaddedTuple;
typedef std::vector<PaddedTuple> StdVectorUint64Padded;
typedef eastl::tuple_vector<uint64_t, PaddingStruct> EaTupleVectorUint64Padded;
40
41 namespace
42 {
43
44
45 //////////////////////////////////////////////////////////////////////////////
46 // MovableType
47 //
48 struct MovableType
49 {
50 int8_t* mpData;
51 enum { kDataSize = 128 };
52
MovableType__anonabd27c120111::MovableType53 MovableType() : mpData(new int8_t[kDataSize])
54 { memset(mpData, 0, kDataSize); }
55
MovableType__anonabd27c120111::MovableType56 MovableType(const MovableType& x) : mpData(new int8_t[kDataSize])
57 { memcpy(mpData, x.mpData, kDataSize); }
58
operator =__anonabd27c120111::MovableType59 MovableType& operator=(const MovableType& x)
60 {
61 if(!mpData)
62 mpData = new int8_t[kDataSize];
63 memcpy(mpData, x.mpData, kDataSize);
64 return *this;
65 }
66
67 #if EASTL_MOVE_SEMANTICS_ENABLED
MovableType__anonabd27c120111::MovableType68 MovableType(MovableType&& x) EA_NOEXCEPT : mpData(x.mpData)
69 { x.mpData = NULL; }
70
operator =__anonabd27c120111::MovableType71 MovableType& operator=(MovableType&& x)
72 {
73 eastl::swap(mpData, x.mpData); // In practice it may not be right to do a swap, depending on the case.
74 return *this;
75 }
76 #endif
77
~MovableType__anonabd27c120111::MovableType78 ~MovableType()
79 { delete[] mpData; }
80 };
81
82
83 //////////////////////////////////////////////////////////////////////////////
84 // AutoRefCount
85 //
86 // Basic ref-counted object.
87 //
88 template <typename T>
89 class AutoRefCount
90 {
91 public:
92 T* mpObject;
93
94 public:
AutoRefCount()95 AutoRefCount() EA_NOEXCEPT : mpObject(NULL)
96 {}
97
AutoRefCount(T * pObject)98 AutoRefCount(T* pObject) EA_NOEXCEPT : mpObject(pObject)
99 {
100 if(mpObject)
101 mpObject->AddRef();
102 }
103
AutoRefCount(T * pObject,int)104 AutoRefCount(T* pObject, int) EA_NOEXCEPT : mpObject(pObject)
105 {
106 // Inherit the existing refcount.
107 }
108
AutoRefCount(const AutoRefCount & x)109 AutoRefCount(const AutoRefCount& x) EA_NOEXCEPT : mpObject(x.mpObject)
110 {
111 if(mpObject)
112 mpObject->AddRef();
113 }
114
operator =(const AutoRefCount & x)115 AutoRefCount& operator=(const AutoRefCount& x)
116 {
117 return operator=(x.mpObject);
118 }
119
operator =(T * pObject)120 AutoRefCount& operator=(T* pObject)
121 {
122 if(pObject != mpObject)
123 {
124 T* const pTemp = mpObject; // Create temporary to prevent possible problems with re-entrancy.
125 if(pObject)
126 pObject->AddRef();
127 mpObject = pObject;
128 if(pTemp)
129 pTemp->Release();
130 }
131 return *this;
132 }
133
134 #if EASTL_MOVE_SEMANTICS_ENABLED
AutoRefCount(AutoRefCount && x)135 AutoRefCount(AutoRefCount&& x) EA_NOEXCEPT : mpObject(x.mpObject)
136 {
137 x.mpObject = NULL;
138 }
139
operator =(AutoRefCount && x)140 AutoRefCount& operator=(AutoRefCount&& x)
141 {
142 if(mpObject)
143 mpObject->Release();
144 mpObject = x.mpObject;
145 x.mpObject = NULL;
146 return *this;
147 }
148 #endif
149
~AutoRefCount()150 ~AutoRefCount()
151 {
152 if(mpObject)
153 mpObject->Release();
154 }
155
operator *() const156 T& operator *() const EA_NOEXCEPT
157 { return *mpObject; }
158
operator ->() const159 T* operator ->() const EA_NOEXCEPT
160 { return mpObject; }
161
operator T*() const162 operator T*() const EA_NOEXCEPT
163 { return mpObject; }
164
165 }; // class AutoRefCount
166
167
// Minimal intrusively ref-counted object for use with AutoRefCount. The two
// static counters record the global number of AddRef/Release calls so tests
// can measure how much ref-count traffic a container operation generates.
struct RefCounted
{
	int        mRefCount;       // Starts at 1: the creator holds the first reference.
	static int msAddRefCount;   // Total AddRef() calls across all instances.
	static int msReleaseCount;  // Total Release() calls across all instances.

	RefCounted() : mRefCount(1) {}

	int AddRef()
	{
		++msAddRefCount;
		return ++mRefCount;
	}

	int Release()
	{
		++msReleaseCount;
		--mRefCount;
		if(mRefCount > 0)
			return mRefCount;
		delete this; // Last reference released; self-destruct.
		return 0;
	}
};

int RefCounted::msAddRefCount  = 0;
int RefCounted::msReleaseCount = 0;
191
192 } // namespace
193
194
195 namespace
196 {
197 template <typename Container>
TestPushBack(EA::StdC::Stopwatch & stopwatch,Container & c,eastl::vector<uint32_t> & intVector)198 void TestPushBack(EA::StdC::Stopwatch& stopwatch, Container& c, eastl::vector<uint32_t>& intVector)
199 {
200 stopwatch.Restart();
201 for(eastl_size_t j = 0, jEnd = intVector.size(); j < jEnd; j++)
202 c.push_back((uint64_t)intVector[j]);
203 stopwatch.Stop();
204 }
205
206
207 template <typename Container>
TestBracket(EA::StdC::Stopwatch & stopwatch,Container & c)208 void TestBracket(EA::StdC::Stopwatch& stopwatch, Container& c)
209 {
210 uint64_t temp = 0;
211 stopwatch.Restart();
212 for(typename Container::size_type j = 0, jEnd = c.size(); j < jEnd; j++)
213 temp += c[j];
214 stopwatch.Stop();
215 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(temp & 0xffffffff));
216 }
217
TestBracket(EA::StdC::Stopwatch & stopwatch,EaTupleVectorUint64 & c)218 void TestBracket(EA::StdC::Stopwatch& stopwatch, EaTupleVectorUint64& c)
219 {
220 uint64_t temp = 0;
221 stopwatch.Restart();
222 for (typename EaTupleVectorUint64::size_type j = 0, jEnd = c.size(); j < jEnd; j++)
223 temp += eastl::get<0>(c[j]);
224 stopwatch.Stop();
225 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(temp & 0xffffffff));
226 }
227
228 template <typename Container>
TestFind(EA::StdC::Stopwatch & stopwatch,Container & c)229 void TestFind(EA::StdC::Stopwatch& stopwatch, Container& c)
230 {
231 stopwatch.Restart();
232 typedef typename Container::iterator iterator_t; // This typedef is required to get this code to compile on RVCT
233 iterator_t it = eastl::find(c.begin(), c.end(), UINT64_C(0xffffffffffff));
234 stopwatch.Stop();
235 if(it != c.end())
236 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)*it);
237 }
238
TestFind(EA::StdC::Stopwatch & stopwatch,EaTupleVectorUint64 & c)239 void TestFind(EA::StdC::Stopwatch& stopwatch, EaTupleVectorUint64& c)
240 {
241 eastl::tuple<uint64_t> val(0xffffffffffff);
242 stopwatch.Restart();
243 EaTupleVectorUint64::iterator it = eastl::find(c.begin(), c.end(), val);
244 stopwatch.Stop();
245 if (it != c.end())
246 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)eastl::get<0>(*it));
247 }
248
249 template <typename Container>
TestSort(EA::StdC::Stopwatch & stopwatch,Container & c)250 void TestSort(EA::StdC::Stopwatch& stopwatch, Container& c)
251 {
252 // Intentionally use eastl sort in order to measure just
253 // vector access speed and not be polluted by sort speed.
254 stopwatch.Restart();
255 eastl::quick_sort(c.begin(), c.end());
256 stopwatch.Stop();
257 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(c[0] & 0xffffffff));
258 }
259
TestSort(EA::StdC::Stopwatch & stopwatch,EaTupleVectorUint64 & c)260 void TestSort(EA::StdC::Stopwatch& stopwatch, EaTupleVectorUint64& c)
261 {
262 // Intentionally use eastl sort in order to measure just
263 // vector access speed and not be polluted by sort speed.
264 stopwatch.Restart();
265 eastl::quick_sort(c.begin(), c.end());
266 stopwatch.Stop();
267 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(eastl::get<0>(c[0]) & 0xffffffff));
268 }
269
270
271 template <typename Container>
TestInsert(EA::StdC::Stopwatch & stopwatch,Container & c)272 void TestInsert(EA::StdC::Stopwatch& stopwatch, Container& c)
273 {
274 typename Container::size_type j, jEnd;
275 typename Container::iterator it;
276
277 stopwatch.Restart();
278 for(j = 0, jEnd = 100, it = c.begin(); j < jEnd; ++j)
279 {
280 it = c.insert(it, UINT64_C(0xffffffffffff));
281
282 if(it == c.end()) // Try to safely increment the iterator three times.
283 it = c.begin();
284 if(++it == c.end())
285 it = c.begin();
286 if(++it == c.end())
287 it = c.begin();
288 }
289 stopwatch.Stop();
290 }
291
292
293 template <typename Container>
TestErase(EA::StdC::Stopwatch & stopwatch,Container & c)294 void TestErase(EA::StdC::Stopwatch& stopwatch, Container& c)
295 {
296 typename Container::size_type j, jEnd;
297 typename Container::iterator it;
298
299 stopwatch.Restart();
300 for(j = 0, jEnd = 100, it = c.begin(); j < jEnd; ++j)
301 {
302 it = c.erase(it);
303
304 if(it == c.end()) // Try to safely increment the iterator three times.
305 it = c.begin();
306 if(++it == c.end())
307 it = c.begin();
308 if(++it == c.end())
309 it = c.begin();
310 }
311 stopwatch.Stop();
312 }
313
314
315 template <typename Container>
TestMoveReallocate(EA::StdC::Stopwatch & stopwatch,Container & c)316 void TestMoveReallocate(EA::StdC::Stopwatch& stopwatch, Container& c)
317 {
318 stopwatch.Restart();
319 while(c.size() < 8192)
320 c.resize(c.capacity() + 1);
321 stopwatch.Stop();
322 }
323
324
325 template <typename Container>
TestMoveErase(EA::StdC::Stopwatch & stopwatch,Container & c)326 void TestMoveErase(EA::StdC::Stopwatch& stopwatch, Container& c)
327 {
328 stopwatch.Restart();
329 while(!c.empty())
330 c.erase(c.begin());
331 stopwatch.Stop();
332 }
333
334 //////////////////////////////////////////////////////////////////////////
335 // Variations of test functions for the Padded structures
336 template <typename Container>
TestTuplePushBack(EA::StdC::Stopwatch & stopwatch,Container & c,eastl::vector<uint32_t> & intVector)337 void TestTuplePushBack(EA::StdC::Stopwatch& stopwatch, Container& c, eastl::vector<uint32_t>& intVector)
338 {
339 stopwatch.Restart();
340 for (eastl_size_t j = 0, jEnd = intVector.size(); j < jEnd; j++)
341 {
342 PaddedTuple tup((uint64_t)intVector[j], DefaultPadding);
343 c.push_back(tup);
344 }
345 stopwatch.Stop();
346 }
347
348
349 template <typename Container>
TestTupleBracket(EA::StdC::Stopwatch & stopwatch,Container & c)350 void TestTupleBracket(EA::StdC::Stopwatch& stopwatch, Container& c)
351 {
352 uint64_t temp = 0;
353 stopwatch.Restart();
354 for (typename Container::size_type j = 0, jEnd = c.size(); j < jEnd; j++)
355 temp += eastl::get<0>(c[j]);
356 stopwatch.Stop();
357 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(temp & 0xffffffff));
358 }
359
360
361 template <typename Container>
TestTupleFind(EA::StdC::Stopwatch & stopwatch,Container & c)362 void TestTupleFind(EA::StdC::Stopwatch& stopwatch, Container& c)
363 {
364 stopwatch.Restart();
365 typedef typename Container::iterator iterator_t; // This typedef is required to get this code to compile on RVCT
366 iterator_t it = eastl::find_if(c.begin(), c.end(), [](auto tup) { return eastl::get<0>(tup) == 0xFFFFFFFF; });
367 stopwatch.Stop();
368 if (it != c.end())
369 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)eastl::get<0>(*it));
370 }
371
372 template <typename Container>
TestTupleSort(EA::StdC::Stopwatch & stopwatch,Container & c)373 void TestTupleSort(EA::StdC::Stopwatch& stopwatch, Container& c)
374 {
375 // Intentionally use eastl sort in order to measure just
376 // vector access speed and not be polluted by sort speed.
377 stopwatch.Restart();
378 eastl::quick_sort(c.begin(), c.end(), [](auto a, auto b) { return eastl::get<0>(a) < eastl::get<0>(b); });
379 stopwatch.Stop();
380 sprintf(Benchmark::gScratchBuffer, "%u", (unsigned)(eastl::get<0>(c[0]) & 0xffffffff));
381 }
382
383 template <typename Container>
TestTupleInsert(EA::StdC::Stopwatch & stopwatch,Container & c)384 void TestTupleInsert(EA::StdC::Stopwatch& stopwatch, Container& c)
385 {
386 typename Container::size_type j, jEnd;
387 typename Container::iterator it;
388 PaddedTuple tup(0xFFFFFFFF, DefaultPadding);
389
390 stopwatch.Restart();
391 for (j = 0, jEnd = 100, it = c.begin(); j < jEnd; ++j)
392 {
393 it = c.insert(it, tup);
394
395 if (it == c.end()) // Try to safely increment the iterator three times.
396 it = c.begin();
397 if (++it == c.end())
398 it = c.begin();
399 if (++it == c.end())
400 it = c.begin();
401 }
402 stopwatch.Stop();
403 }
404
405 template <typename Container>
TestTupleErase(EA::StdC::Stopwatch & stopwatch,Container & c)406 void TestTupleErase(EA::StdC::Stopwatch& stopwatch, Container& c)
407 {
408 typename Container::size_type j, jEnd;
409 typename Container::iterator it;
410
411 stopwatch.Restart();
412 for (j = 0, jEnd = 100, it = c.begin(); j < jEnd; ++j)
413 {
414 it = c.erase(it);
415
416 if (it == c.end()) // Try to safely increment the iterator three times.
417 it = c.begin();
418 if (++it == c.end())
419 it = c.begin();
420 if (++it == c.end())
421 it = c.begin();
422 }
423 stopwatch.Stop();
424 }
425
426 } // namespace
427
428
429
430
431
BenchmarkTupleVector()432 void BenchmarkTupleVector()
433 {
434 EASTLTest_Printf("TupleVector\n");
435
436 EA::UnitTest::RandGenT<uint32_t> rng(EA::UnitTest::GetRandSeed());
437 EA::StdC::Stopwatch stopwatch1(EA::StdC::Stopwatch::kUnitsCPUCycles);
438 EA::StdC::Stopwatch stopwatch2(EA::StdC::Stopwatch::kUnitsCPUCycles);
439
440 {
441 eastl::vector<uint32_t> intVector(100000);
442 eastl::generate(intVector.begin(), intVector.end(), rng);
443
444 for(int i = 0; i < 2; i++)
445 {
446 StdVectorUint64 stdVectorUint64;
447 EaTupleVectorUint64 eaTupleVectorUint64;
448
449
450 ///////////////////////////////
451 // Test push_back
452 ///////////////////////////////
453
454 TestPushBack(stopwatch1, stdVectorUint64, intVector);
455 TestPushBack(stopwatch2, eaTupleVectorUint64, intVector);
456
457 if(i == 1)
458 Benchmark::AddResult("tuple_vector<uint64>/push_back", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
459
460
461 ///////////////////////////////
462 // Test operator[].
463 ///////////////////////////////
464
465 TestBracket(stopwatch1, stdVectorUint64);
466 TestBracket(stopwatch2, eaTupleVectorUint64);
467
468 if(i == 1)
469 Benchmark::AddResult("tuple_vector<uint64>/operator[]", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
470
471
472 ///////////////////////////////
473 // Test iteration via find().
474 ///////////////////////////////
475
476 TestFind(stopwatch1, stdVectorUint64);
477 TestFind(stopwatch2, eaTupleVectorUint64);
478 TestFind(stopwatch1, stdVectorUint64);
479 TestFind(stopwatch2, eaTupleVectorUint64);
480
481 if(i == 1)
482 Benchmark::AddResult("tuple_vector<uint64>/iteration", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
483
484
485 ///////////////////////////////
486 // Test sort
487 ///////////////////////////////
488
489 // Currently VC++ complains about our sort function decrementing std::iterator that is already at begin(). In the strictest sense,
490 // that's a valid complaint, but we aren't testing std STL here. We will want to revise our sort function eventually.
491 #if !defined(_MSC_VER) || !defined(_ITERATOR_DEBUG_LEVEL) || (_ITERATOR_DEBUG_LEVEL < 2)
492 TestSort(stopwatch1, stdVectorUint64);
493 TestSort(stopwatch2, eaTupleVectorUint64);
494
495 if(i == 1)
496 Benchmark::AddResult("tuple_vector<uint64>/sort", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
497 #endif
498
499 ///////////////////////////////
500 // Test insert
501 ///////////////////////////////
502
503 TestInsert(stopwatch1, stdVectorUint64);
504 TestInsert(stopwatch2, eaTupleVectorUint64);
505
506 if(i == 1)
507 Benchmark::AddResult("tuple_vector<uint64>/insert", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
508
509
510 ///////////////////////////////
511 // Test erase
512 ///////////////////////////////
513
514 TestErase(stopwatch1, stdVectorUint64);
515 TestErase(stopwatch2, eaTupleVectorUint64);
516
517 if(i == 1)
518 Benchmark::AddResult("tuple_vector<uint64>/erase", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
519
520
521 ///////////////////////////////////////////
522 // Test move of MovableType
523 // Should be much faster with C++11 move.
524 ///////////////////////////////////////////
525
526 std::vector<MovableType> stdVectorMovableType;
527 eastl::tuple_vector<MovableType> eaTupleVectorMovableType;
528
529 TestMoveReallocate(stopwatch1, stdVectorMovableType);
530 TestMoveReallocate(stopwatch2, eaTupleVectorMovableType);
531
532 if(i == 1)
533 Benchmark::AddResult("tuple_vector<MovableType>/reallocate", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
534
535
536 TestMoveErase(stopwatch1, stdVectorMovableType);
537 TestMoveErase(stopwatch2, eaTupleVectorMovableType);
538
539 if(i == 1)
540 Benchmark::AddResult("tuple_vector<MovableType>/erase", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
541
542
543 ///////////////////////////////////////////
544 // Test move of AutoRefCount
545 // Should be much faster with C++11 move.
546 ///////////////////////////////////////////
547
548 std::vector<AutoRefCount<RefCounted> > stdVectorAutoRefCount;
549 eastl::tuple_vector<AutoRefCount<RefCounted> > eaTupleVectorAutoRefCount;
550
551 for(size_t a = 0; a < 2048; a++)
552 {
553 stdVectorAutoRefCount.push_back(AutoRefCount<RefCounted>(new RefCounted));
554 eaTupleVectorAutoRefCount.push_back(AutoRefCount<RefCounted>(new RefCounted));
555 }
556
557 RefCounted::msAddRefCount = 0;
558 RefCounted::msReleaseCount = 0;
559 TestMoveErase(stopwatch1, stdVectorAutoRefCount);
560 //EASTLTest_Printf("tuple_vector<AutoRefCount>/erase std counts: %d %d\n", RefCounted::msAddRefCount, RefCounted::msReleaseCount);
561
562 RefCounted::msAddRefCount = 0;
563 RefCounted::msReleaseCount = 0;
564 TestMoveErase(stopwatch2, eaTupleVectorAutoRefCount);
565 //EASTLTest_Printf("tuple_vector<AutoRefCount>/erase EA counts: %d %d\n", RefCounted::msAddRefCount, RefCounted::msReleaseCount);
566
567 if(i == 1)
568 Benchmark::AddResult("tuple_vector<AutoRefCount>/erase", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
569
570
571 //////////////////////////////////////////////////////////////////////////
572 // Test various operations with "padded" data, to demonstrate access/modification of sparse data
573
574 StdVectorUint64Padded stdVectorUint64Padded;
575 EaTupleVectorUint64Padded eaTupleVectorUint64Padded;
576
577 ///////////////////////////////
578 // Test push_back
579 ///////////////////////////////
580
581 TestTuplePushBack(stopwatch1, stdVectorUint64Padded, intVector);
582 TestTuplePushBack(stopwatch2, eaTupleVectorUint64Padded, intVector);
583
584 if(i == 1)
585 Benchmark::AddResult("tuple_vector<uint64,Padding>/push_back", stopwatch1.GetUnits(),
586 stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
587
588
589 ///////////////////////////////
590 // Test operator[].
591 ///////////////////////////////
592
593 TestTupleBracket(stopwatch1, stdVectorUint64Padded);
594 TestTupleBracket(stopwatch2, eaTupleVectorUint64Padded);
595
596 if(i == 1)
597 Benchmark::AddResult("tuple_vector<uint64,Padding>/operator[]", stopwatch1.GetUnits(),
598 stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
599
600
601 ///////////////////////////////
602 // Test iteration via find().
603 ///////////////////////////////
604
605 TestTupleFind(stopwatch1, stdVectorUint64Padded);
606 TestTupleFind(stopwatch2, eaTupleVectorUint64Padded);
607 TestTupleFind(stopwatch1, stdVectorUint64Padded);
608 TestTupleFind(stopwatch2, eaTupleVectorUint64Padded);
609
610 if(i == 1)
611 Benchmark::AddResult("tuple_vector<uint64,Padding>/iteration", stopwatch1.GetUnits(),
612 stopwatch1.GetElapsedTime(), stopwatch2.GetElapsedTime());
613
614
615 ///////////////////////////////
616 // Test sort
617 ///////////////////////////////
618
619 // Currently VC++ complains about our sort function decrementing std::iterator that is already at
620 // begin(). In the strictest sense, that's a valid complaint, but we aren't testing std STL here. We
621 // will want to revise our sort function eventually.
622 #if !defined(_MSC_VER) || !defined(_ITERATOR_DEBUG_LEVEL) || (_ITERATOR_DEBUG_LEVEL < 2)
623 TestTupleSort(stopwatch1, stdVectorUint64Padded);
624 TestTupleSort(stopwatch2, eaTupleVectorUint64Padded);
625
626 if(i == 1)
627 Benchmark::AddResult("tuple_vector<uint64,Padding>/sort", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(),
628 stopwatch2.GetElapsedTime());
629 #endif
630
631 ///////////////////////////////
632 // Test insert
633 ///////////////////////////////
634
635 TestTupleInsert(stopwatch1, stdVectorUint64Padded);
636 TestTupleInsert(stopwatch2, eaTupleVectorUint64Padded);
637
638 if(i == 1)
639 Benchmark::AddResult("tuple_vector<uint64,Padding>/insert", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(),
640 stopwatch2.GetElapsedTime());
641
642
643 ///////////////////////////////
644 // Test erase
645 ///////////////////////////////
646
647 TestTupleErase(stopwatch1, stdVectorUint64Padded);
648 TestTupleErase(stopwatch2, eaTupleVectorUint64Padded);
649
650 if(i == 1)
651 Benchmark::AddResult("tuple_vector<uint64,Padding>/erase", stopwatch1.GetUnits(), stopwatch1.GetElapsedTime(),
652 stopwatch2.GetElapsedTime());
653 }
654 }
655 }
656
657
658
659
660
661
662
663
664
665
666
667
668