/*********************************************************************
 *
 * Copyright 2012 Intel Corporation
 * Copyright 2012 VMware, Inc.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person
 * obtaining a copy of this software and associated documentation
 * files (the "Software"), to deal in the Software without
 * restriction, including without limitation the rights to use, copy,
 * modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 *********************************************************************/

#pragma once

#include <vector>

#include "trace_model.hpp"

namespace trace {

/* A set of call numbers.
 *
 * This was originally designed as a more efficient replacement for
 * std::set<unsigned>, which was used heavily within the trim code's
 * TraceAnalyzer. It is quite similar to trace::CallSet, with the
 * following differences:
 *
 *   Simplifications:
 *
 *     * There is no support here for the 'step' and 'freq' features
 *       supported by trace::CallSet.
 *
 *   Sophistications:
 *
 *     * This callset is implemented with a skip list, giving
 *       (probabilistically) logarithmic lookup times for
 *       out-of-order lookups.
 *
 *     * This callset optimizes the addition of new calls which are
 *       within or adjacent to existing ranges, by either doing
 *       nothing, expanding the limits of an existing range, or
 *       merging two or more ranges.
 *
 * It would be possible to extend this code to support the missing
 * features of trace::CallSet, though the 'step' and 'freq' features
 * would prevent some of the range-extending and merging
 * optimizations in some cases.
 */
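
/* A brief usage sketch (illustrative only; it exercises the add() and
 * contains() entry points declared below, and relies on the
 * adjacent-range merging described above, whose exact behavior is
 * implemented in the corresponding .cpp file):
 *
 *     trace::FastCallSet calls;
 *
 *     calls.add(10, 20);      // insert the range [10, 20]
 *     calls.add(21);          // adjacent call: extends the range to [10, 21]
 *     calls.add(100, 200);    // a second, disjoint range
 *
 *     calls.contains(15);     // true: inside the first range
 *     calls.contains(50);     // false: between the two ranges
 */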

class FastCallRangePtr;

class FastCallRange {
public:
    CallNo first;
    CallNo last;
    int level;
    std::vector<FastCallRangePtr> next;

    // (NOTE: Initialize ref_counter to 0 in all constructors)
    FastCallRange(CallNo first, CallNo last, int level);

    bool contains(CallNo call_no) const;

private:
    friend class FastCallRangePtr;
    size_t ref_counter;
    // ref_counter must be initialized to 0 by all constructors
    // ref_counter is the number of FastCallRangePtr objects that point at this
};

class FastCallRangePtr {
public:
    FastCallRange* operator-> () { return  this->ptr; }
    FastCallRange& operator * () { return *this->ptr; }
    FastCallRange* operator ()() { return  this->ptr; } // get pointer

    FastCallRangePtr () : ptr(0) {}
    FastCallRangePtr(FastCallRange* _ptr) : ptr(_ptr)
                                 { if (this->ptr) ++this->ptr->ref_counter; }

   ~FastCallRangePtr()           { if (this->ptr)
                                       if (--this->ptr->ref_counter == 0)
                                           delete this->ptr;
                                 }

    FastCallRangePtr(FastCallRangePtr const& _ptr) : ptr(_ptr.ptr)
                                 { if (this->ptr) ++this->ptr->ref_counter; }

    FastCallRangePtr& operator= (FastCallRangePtr const& new_ptr)
        {   // DO NOT CHANGE THE ORDER OF THESE STATEMENTS!
            // (This order properly handles self-assignment)
            // (This order also properly handles recursion, e.g.,
            //  if a FastCallRange contains FastCallRangePtrs)
            FastCallRange* const old_ptr = this->ptr;
            this->ptr = new_ptr.ptr;
            if (this->ptr)
                ++this->ptr->ref_counter;
            if (old_ptr) {
                if (--old_ptr->ref_counter == 0)
                    delete old_ptr;
            }
            return *this;
        }
private:
    FastCallRange* ptr;
};
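
/* A short sketch of the intended FastCallRangePtr semantics (illustrative
 * only; the exact FastCallRange constructor arguments follow the declaration
 * above): the pointer is intrusively reference counted via ref_counter, so
 * copies share ownership and the pointed-to FastCallRange is deleted when
 * the last pointer to it goes away.
 *
 *     FastCallRangePtr a(new FastCallRange(1, 5, 0)); // ref_counter == 1
 *     {
 *         FastCallRangePtr b = a;                     // ref_counter == 2
 *         b->contains(3);                             // true: 3 is in [1, 5]
 *     }                                               // b destroyed: ref_counter == 1
 *     a = FastCallRangePtr();                         // last reference dropped: range deleted
 */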

class FastCallSet {
public:
    FastCallRange head;
    int max_level;

    FastCallSet();

    bool empty(void) const;

    void add(CallNo first, CallNo last);

    void add(CallNo call_no);

    bool contains(CallNo call_no) const;
};
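
/* A hedged sketch of the skip-list lookup these structures are designed
 * for. The actual implementation lives in the corresponding .cpp file;
 * this only illustrates the usual skip-list walk, assuming head.next holds
 * max_level forward pointers and that a range's 'last' is its inclusive
 * upper bound:
 *
 *     bool contains_sketch(FastCallSet &set, CallNo call_no)
 *     {
 *         FastCallRange *node = &set.head;
 *
 *         // Walk forward on the highest levels first, dropping down a
 *         // level whenever the next range ends at or beyond call_no.
 *         for (int level = set.max_level - 1; level >= 0; level--) {
 *             while (node->next[level]() && node->next[level]->last < call_no)
 *                 node = node->next[level]();
 *         }
 *
 *         // The candidate range, if any, is the next node at level 0.
 *         FastCallRangePtr &candidate = node->next[0];
 *         return candidate() && candidate->contains(call_no);
 *     }
 */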

} /* namespace trace */