// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/trace_event/category_registry.h"

#include <string.h>

#include <type_traits>

#include "base/debug/leak_annotations.h"
#include "base/logging.h"
#include "base/third_party/dynamic_annotations/dynamic_annotations.h"

namespace base {
namespace trace_event {

namespace {

// |categories_| might end up requiring dynamic initializers if TraceCategory
// is not POD.
static_assert(std::is_pod<TraceCategory>::value, "TraceCategory must be POD");

}  // namespace

// static
TraceCategory CategoryRegistry::categories_[kMaxCategories] = {
    INTERNAL_TRACE_LIST_BUILTIN_CATEGORIES(INTERNAL_TRACE_INIT_CATEGORY)};

// static
base::subtle::AtomicWord CategoryRegistry::category_index_ =
    BuiltinCategories::Size();

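// The first three entries of |categories_| double as sentinel values: e.g.
// kCategoryExhausted is handed back when the registry is full (see
// GetOrCreateCategoryLocked() below), and IsMetaCategory() relies on the
// meta categories occupying the lowest slots of the array.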
// static
TraceCategory* const CategoryRegistry::kCategoryExhausted = &categories_[0];
TraceCategory* const CategoryRegistry::kCategoryAlreadyShutdown =
    &categories_[1];
TraceCategory* const CategoryRegistry::kCategoryMetadata = &categories_[2];

// static
void CategoryRegistry::Initialize() {
  // Trace is enabled or disabled on one thread while other threads are
  // accessing the enabled flag. We don't care whether edge-case events are
  // traced or not, so we allow races on the enabled flag to keep the trace
  // macros fast.
  for (size_t i = 0; i < kMaxCategories; ++i) {
    ANNOTATE_BENIGN_RACE(categories_[i].state_ptr(),
                         "trace_event category enabled");
    // If this DCHECK is hit in a test, it means that ResetForTesting() was
    // not called and category state is leaking between test fixtures.
    DCHECK(!categories_[i].is_enabled());
  }
}

// static
void CategoryRegistry::ResetForTesting() {
  // reset_for_testing() clears only the enabled state and the filters. The
  // categories themselves cannot be deleted, because the static pointers
  // injected by the trace macros still point to them and cannot be reset.
  for (size_t i = 0; i < kMaxCategories; ++i)
    categories_[i].reset_for_testing();
}

// static
TraceCategory* CategoryRegistry::GetCategoryByName(const char* category_name) {
  DCHECK(!strchr(category_name, '"'))
      << "Category names may not contain double quotes";

  // |categories_| is append-only; avoid taking a lock on the fast path.
  size_t category_index = base::subtle::Acquire_Load(&category_index_);

  // Search for a pre-existing category group.
  for (size_t i = 0; i < category_index; ++i) {
    if (strcmp(categories_[i].name(), category_name) == 0) {
      return &categories_[i];
    }
  }
  return nullptr;
}

// static
bool CategoryRegistry::GetOrCreateCategoryLocked(
    const char* category_name,
    CategoryInitializerFn category_initializer_fn,
    TraceCategory** category) {
  // This is the slow path: the lock is not held on the fast path
  // (GetCategoryByName()), so more than one thread could have reached this
  // point trying to add the same category.
  *category = GetCategoryByName(category_name);
  if (*category)
    return false;

  // Create a new category.
  size_t category_index = base::subtle::Acquire_Load(&category_index_);
  if (category_index >= kMaxCategories) {
    NOTREACHED() << "must increase kMaxCategories";
    *category = kCategoryExhausted;
    return false;
  }

  // TODO(primiano): this strdup should be removed. The only documented reason
  // for it was TraceWatchEvent, which is gone. However, something might have
  // ended up relying on this. Needs some auditing before removal.
  const char* category_name_copy = strdup(category_name);
  ANNOTATE_LEAKING_OBJECT_PTR(category_name_copy);

  *category = &categories_[category_index];
  DCHECK(!(*category)->is_valid());
  DCHECK(!(*category)->is_enabled());
  (*category)->set_name(category_name_copy);
  category_initializer_fn(*category);

  // Update the max index now. This Release_Store pairs with the Acquire_Load
  // of |category_index_| in GetCategoryByName() and GetAllCategories(): the
  // category contents written above become visible to lock-free readers
  // before the incremented index does.
  base::subtle::Release_Store(&category_index_, category_index + 1);
  return true;
}
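
// A rough sketch of how callers are expected to combine the lock-free fast
// path with the locked slow path above (illustrative only: the real callers
// and their lock live elsewhere, e.g. in TraceLog, and |g_registry_lock| is
// a hypothetical name; this also assumes CategoryInitializerFn accepts a
// captureless lambda):
//
//   TraceCategory* category =
//       CategoryRegistry::GetCategoryByName("my_category");
//   if (!category) {
//     AutoLock lock(g_registry_lock);  // Hypothetical lock.
//     CategoryRegistry::GetOrCreateCategoryLocked(
//         "my_category", [](TraceCategory*) {}, &category);
//   }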

// static
const TraceCategory* CategoryRegistry::GetCategoryByStatePtr(
    const uint8_t* category_state) {
  const TraceCategory* category = TraceCategory::FromStatePtr(category_state);
  DCHECK(IsValidCategoryPtr(category));
  return category;
}

// static
bool CategoryRegistry::IsMetaCategory(const TraceCategory* category) {
  DCHECK(IsValidCategoryPtr(category));
  // The meta categories occupy the first slots of |categories_| (up to and
  // including kCategoryMetadata), so a pointer comparison suffices.
  return category <= kCategoryMetadata;
}

// static
CategoryRegistry::Range CategoryRegistry::GetAllCategories() {
  // The |categories_| array is append-only. We only have to guarantee that
  // we never return a category that is still being initialized by
  // GetOrCreateCategoryLocked(), hence the Acquire_Load of the index.
  size_t category_index = base::subtle::Acquire_Load(&category_index_);
  return CategoryRegistry::Range(&categories_[0], &categories_[category_index]);
}
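
// A sketch of lock-free iteration over the registered categories (assuming
// Range exposes the usual begin()/end() pair declared in the header):
//
//   for (const TraceCategory& category :
//        CategoryRegistry::GetAllCategories()) {
//     VLOG(1) << category.name();
//   }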

// static
bool CategoryRegistry::IsValidCategoryPtr(const TraceCategory* category) {
  // If any of these checks fail, something has cached a corrupt category
  // pointer. Note the check is deliberately cheap: it verifies alignment and
  // range, not that the pointer lands exactly on an array element.
  uintptr_t ptr = reinterpret_cast<uintptr_t>(category);
  return ptr % sizeof(void*) == 0 &&
         ptr >= reinterpret_cast<uintptr_t>(&categories_[0]) &&
         ptr <= reinterpret_cast<uintptr_t>(&categories_[kMaxCategories - 1]);
}

}  // namespace trace_event
}  // namespace base