//===-- tsan_interface_ann.cpp --------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
//===----------------------------------------------------------------------===//
#include "sanitizer_common/sanitizer_libc.h"
#include "sanitizer_common/sanitizer_internal_defs.h"
#include "sanitizer_common/sanitizer_placement_new.h"
#include "sanitizer_common/sanitizer_stacktrace.h"
#include "sanitizer_common/sanitizer_vector.h"
#include "tsan_interface_ann.h"
#include "tsan_mutex.h"
#include "tsan_report.h"
#include "tsan_rtl.h"
#include "tsan_mman.h"
#include "tsan_flags.h"
#include "tsan_platform.h"

#define CALLERPC ((uptr)__builtin_return_address(0))

using namespace __tsan;

namespace __tsan {

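// Each annotation entry point runs under a ScopedAnnotation: FuncEntry() pushes
// a synthetic frame for the annotation so it shows up in reports, and the
// destructor pops it and verifies that the annotation did not leak any
// runtime locks.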
class ScopedAnnotation {
 public:
  ScopedAnnotation(ThreadState *thr, const char *aname, uptr pc)
      : thr_(thr) {
    FuncEntry(thr_, pc);
    DPrintf("#%d: annotation %s()\n", thr_->tid, aname);
  }

  ~ScopedAnnotation() {
    FuncExit(thr_);
    CheckNoLocks(thr_);
  }
 private:
  ThreadState *const thr_;
};

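// Common prologue for every annotation below: bail out early when annotations
// are disabled via the enable_annotations flag, fetch the current ThreadState,
// bump the per-annotation stat counters, and establish the ScopedAnnotation
// frame. It defines `thr` and `pc` for use in the annotation body.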
#define SCOPED_ANNOTATION_RET(typ, ret) \
    if (!flags()->enable_annotations) \
      return ret; \
    ThreadState *thr = cur_thread(); \
    const uptr caller_pc = (uptr)__builtin_return_address(0); \
    StatInc(thr, StatAnnotation); \
    StatInc(thr, Stat##typ); \
    ScopedAnnotation sa(thr, __func__, caller_pc); \
    const uptr pc = StackTrace::GetCurrentPc(); \
    (void)pc; \
/**/

#define SCOPED_ANNOTATION(typ) SCOPED_ANNOTATION_RET(typ, )

static const int kMaxDescLen = 128;

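// Node of an intrusive, circular doubly-linked list (the list head is a dummy
// ExpectRace). `addcount` counts how many times the annotation was added for
// this address range, `hitcount` how many times a report matched it.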
struct ExpectRace {
  ExpectRace *next;
  ExpectRace *prev;
  atomic_uintptr_t hitcount;
  atomic_uintptr_t addcount;
  uptr addr;
  uptr size;
  char *file;
  int line;
  char desc[kMaxDescLen];
};

struct DynamicAnnContext {
  Mutex mtx;
  ExpectRace expect;
  ExpectRace benign;

  DynamicAnnContext()
    : mtx(MutexTypeAnnotations, StatMtxAnnotations) {
  }
};

static DynamicAnnContext *dyn_ann_ctx;
static char dyn_ann_ctx_placeholder[sizeof(DynamicAnnContext)] ALIGNED(64);

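// Registers an expected/benign race for [addr, addr+size). If an entry with
// the same address and size already exists, only its addcount is bumped;
// otherwise a new node is allocated and linked in. The caller must hold
// dyn_ann_ctx->mtx.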
static void AddExpectRace(ExpectRace *list,
    char *f, int l, uptr addr, uptr size, char *desc) {
  ExpectRace *race = list->next;
  for (; race != list; race = race->next) {
    if (race->addr == addr && race->size == size) {
      atomic_store_relaxed(&race->addcount,
          atomic_load_relaxed(&race->addcount) + 1);
      return;
    }
  }
  race = (ExpectRace*)internal_alloc(MBlockExpectRace, sizeof(ExpectRace));
  race->addr = addr;
  race->size = size;
  race->file = f;
  race->line = l;
  race->desc[0] = 0;
  atomic_store_relaxed(&race->hitcount, 0);
  atomic_store_relaxed(&race->addcount, 1);
  if (desc) {
    int i = 0;
    for (; i < kMaxDescLen - 1 && desc[i]; i++)
      race->desc[i] = desc[i];
    race->desc[i] = 0;
  }
  race->prev = list;
  race->next = list->next;
  race->next->prev = race;
  list->next = race;
}

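// Returns the first registered race whose [addr, addr+size) range overlaps the
// queried range, or null if there is none.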
static ExpectRace *FindRace(ExpectRace *list, uptr addr, uptr size) {
  for (ExpectRace *race = list->next; race != list; race = race->next) {
    uptr maxbegin = max(race->addr, addr);
    uptr minend = min(race->addr + race->size, addr + size);
    if (maxbegin < minend)
      return race;
  }
  return 0;
}

static bool CheckContains(ExpectRace *list, uptr addr, uptr size) {
  ExpectRace *race = FindRace(list, addr, size);
  if (race == 0)
    return false;
  DPrintf("Hit expected/benign race: %s addr=%zx:%d %s:%d\n",
      race->desc, race->addr, (int)race->size, race->file, race->line);
  atomic_fetch_add(&race->hitcount, 1, memory_order_relaxed);
  return true;
}

static void InitList(ExpectRace *list) {
  list->next = list;
  list->prev = list;
}

void InitializeDynamicAnnotations() {
  dyn_ann_ctx = new(dyn_ann_ctx_placeholder) DynamicAnnContext;
  InitList(&dyn_ann_ctx->expect);
  InitList(&dyn_ann_ctx->benign);
}

bool IsExpectedReport(uptr addr, uptr size) {
  ReadLock lock(&dyn_ann_ctx->mtx);
  if (CheckContains(&dyn_ann_ctx->expect, addr, size))
    return true;
  if (CheckContains(&dyn_ann_ctx->benign, addr, size))
    return true;
  return false;
}

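// Aggregates benign-race entries with a non-zero counter, de-duplicating by
// (file, line, desc) and summing the chosen counter into the matched entry.
// unique_count counts all registered entries, hit_count sums the counter.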
static void CollectMatchedBenignRaces(Vector<ExpectRace> *matched,
    int *unique_count, int *hit_count, atomic_uintptr_t ExpectRace::*counter) {
  ExpectRace *list = &dyn_ann_ctx->benign;
  for (ExpectRace *race = list->next; race != list; race = race->next) {
    (*unique_count)++;
    const uptr cnt = atomic_load_relaxed(&(race->*counter));
    if (cnt == 0)
      continue;
    *hit_count += cnt;
    uptr i = 0;
    for (; i < matched->Size(); i++) {
      ExpectRace *race0 = &(*matched)[i];
      if (race->line == race0->line
          && internal_strcmp(race->file, race0->file) == 0
          && internal_strcmp(race->desc, race0->desc) == 0) {
        atomic_fetch_add(&(race0->*counter), cnt, memory_order_relaxed);
        break;
      }
    }
    if (i == matched->Size())
      matched->PushBack(*race);
  }
}

void PrintMatchedBenignRaces() {
  Lock lock(&dyn_ann_ctx->mtx);
  int unique_count = 0;
  int hit_count = 0;
  int add_count = 0;
  Vector<ExpectRace> hit_matched;
  CollectMatchedBenignRaces(&hit_matched, &unique_count, &hit_count,
      &ExpectRace::hitcount);
  Vector<ExpectRace> add_matched;
  CollectMatchedBenignRaces(&add_matched, &unique_count, &add_count,
      &ExpectRace::addcount);
  if (hit_matched.Size()) {
    Printf("ThreadSanitizer: Matched %d \"benign\" races (pid=%d):\n",
        hit_count, (int)internal_getpid());
    for (uptr i = 0; i < hit_matched.Size(); i++) {
      Printf("%d %s:%d %s\n",
          atomic_load_relaxed(&hit_matched[i].hitcount),
          hit_matched[i].file, hit_matched[i].line, hit_matched[i].desc);
    }
  }
  if (add_matched.Size()) {
    Printf("ThreadSanitizer: Annotated %d \"benign\" races, %d unique"
           " (pid=%d):\n",
        add_count, unique_count, (int)internal_getpid());
    for (uptr i = 0; i < add_matched.Size(); i++) {
      Printf("%d %s:%d %s\n",
          atomic_load_relaxed(&add_matched[i].addcount),
          add_matched[i].file, add_matched[i].line, add_matched[i].desc);
    }
  }
}

static void ReportMissedExpectedRace(ExpectRace *race) {
  Printf("==================\n");
  Printf("WARNING: ThreadSanitizer: missed expected data race\n");
  Printf("  %s addr=%zx %s:%d\n",
      race->desc, race->addr, race->file, race->line);
  Printf("==================\n");
}
}  // namespace __tsan

using namespace __tsan;

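// The entry points below implement the dynamic annotation interface. They are
// normally not called directly; client code typically goes through the
// ANNOTATE_* macros, which pass __FILE__/__LINE__ as the f/l arguments.
// A rough sketch of a call site (assuming the usual dynamic_annotations.h
// macro expansion; shown only for illustration):
//
//   // ANNOTATE_HAPPENS_BEFORE(&queue) expands to roughly:
//   AnnotateHappensBefore(__FILE__, __LINE__, (uptr)&queue);  // producer side
//   // ...and on the consumer side:
//   AnnotateHappensAfter(__FILE__, __LINE__, (uptr)&queue);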
extern "C" {
void INTERFACE_ATTRIBUTE AnnotateHappensBefore(char *f, int l, uptr addr) {
  SCOPED_ANNOTATION(AnnotateHappensBefore);
  Release(thr, pc, addr);
}

void INTERFACE_ATTRIBUTE AnnotateHappensAfter(char *f, int l, uptr addr) {
  SCOPED_ANNOTATION(AnnotateHappensAfter);
  Acquire(thr, pc, addr);
}

void INTERFACE_ATTRIBUTE AnnotateCondVarSignal(char *f, int l, uptr cv) {
  SCOPED_ANNOTATION(AnnotateCondVarSignal);
}

void INTERFACE_ATTRIBUTE AnnotateCondVarSignalAll(char *f, int l, uptr cv) {
  SCOPED_ANNOTATION(AnnotateCondVarSignalAll);
}

void INTERFACE_ATTRIBUTE AnnotateMutexIsNotPHB(char *f, int l, uptr mu) {
  SCOPED_ANNOTATION(AnnotateMutexIsNotPHB);
}

void INTERFACE_ATTRIBUTE AnnotateCondVarWait(char *f, int l, uptr cv,
                                             uptr lock) {
  SCOPED_ANNOTATION(AnnotateCondVarWait);
}

void INTERFACE_ATTRIBUTE AnnotateRWLockCreate(char *f, int l, uptr m) {
  SCOPED_ANNOTATION(AnnotateRWLockCreate);
  MutexCreate(thr, pc, m, MutexFlagWriteReentrant);
}

void INTERFACE_ATTRIBUTE AnnotateRWLockCreateStatic(char *f, int l, uptr m) {
  SCOPED_ANNOTATION(AnnotateRWLockCreateStatic);
  MutexCreate(thr, pc, m, MutexFlagWriteReentrant | MutexFlagLinkerInit);
}

void INTERFACE_ATTRIBUTE AnnotateRWLockDestroy(char *f, int l, uptr m) {
  SCOPED_ANNOTATION(AnnotateRWLockDestroy);
  MutexDestroy(thr, pc, m);
}

void INTERFACE_ATTRIBUTE AnnotateRWLockAcquired(char *f, int l, uptr m,
                                                uptr is_w) {
  SCOPED_ANNOTATION(AnnotateRWLockAcquired);
  if (is_w)
    MutexPostLock(thr, pc, m, MutexFlagDoPreLockOnPostLock);
  else
    MutexPostReadLock(thr, pc, m, MutexFlagDoPreLockOnPostLock);
}

void INTERFACE_ATTRIBUTE AnnotateRWLockReleased(char *f, int l, uptr m,
                                                uptr is_w) {
  SCOPED_ANNOTATION(AnnotateRWLockReleased);
  if (is_w)
    MutexUnlock(thr, pc, m);
  else
    MutexReadUnlock(thr, pc, m);
}

void INTERFACE_ATTRIBUTE AnnotateTraceMemory(char *f, int l, uptr mem) {
  SCOPED_ANNOTATION(AnnotateTraceMemory);
}

void INTERFACE_ATTRIBUTE AnnotateFlushState(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateFlushState);
}

void INTERFACE_ATTRIBUTE AnnotateNewMemory(char *f, int l, uptr mem,
                                           uptr size) {
  SCOPED_ANNOTATION(AnnotateNewMemory);
}

void INTERFACE_ATTRIBUTE AnnotateNoOp(char *f, int l, uptr mem) {
  SCOPED_ANNOTATION(AnnotateNoOp);
}

void INTERFACE_ATTRIBUTE AnnotateFlushExpectedRaces(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateFlushExpectedRaces);
  Lock lock(&dyn_ann_ctx->mtx);
  while (dyn_ann_ctx->expect.next != &dyn_ann_ctx->expect) {
    ExpectRace *race = dyn_ann_ctx->expect.next;
    if (atomic_load_relaxed(&race->hitcount) == 0) {
      ctx->nmissed_expected++;
      ReportMissedExpectedRace(race);
    }
    race->prev->next = race->next;
    race->next->prev = race->prev;
    internal_free(race);
  }
}

void INTERFACE_ATTRIBUTE AnnotateEnableRaceDetection(
    char *f, int l, int enable) {
  SCOPED_ANNOTATION(AnnotateEnableRaceDetection);
  // FIXME: Reconsider this functionality later. It may be irrelevant.
}

void INTERFACE_ATTRIBUTE AnnotateMutexIsUsedAsCondVar(
    char *f, int l, uptr mu) {
  SCOPED_ANNOTATION(AnnotateMutexIsUsedAsCondVar);
}

void INTERFACE_ATTRIBUTE AnnotatePCQGet(
    char *f, int l, uptr pcq) {
  SCOPED_ANNOTATION(AnnotatePCQGet);
}

void INTERFACE_ATTRIBUTE AnnotatePCQPut(
    char *f, int l, uptr pcq) {
  SCOPED_ANNOTATION(AnnotatePCQPut);
}

void INTERFACE_ATTRIBUTE AnnotatePCQDestroy(
    char *f, int l, uptr pcq) {
  SCOPED_ANNOTATION(AnnotatePCQDestroy);
}

void INTERFACE_ATTRIBUTE AnnotatePCQCreate(
    char *f, int l, uptr pcq) {
  SCOPED_ANNOTATION(AnnotatePCQCreate);
}

void INTERFACE_ATTRIBUTE AnnotateExpectRace(
    char *f, int l, uptr mem, char *desc) {
  SCOPED_ANNOTATION(AnnotateExpectRace);
  Lock lock(&dyn_ann_ctx->mtx);
  AddExpectRace(&dyn_ann_ctx->expect,
                f, l, mem, 1, desc);
  DPrintf("Add expected race: %s addr=%zx %s:%d\n", desc, mem, f, l);
}

static void BenignRaceImpl(
    char *f, int l, uptr mem, uptr size, char *desc) {
  Lock lock(&dyn_ann_ctx->mtx);
  AddExpectRace(&dyn_ann_ctx->benign,
                f, l, mem, size, desc);
  DPrintf("Add benign race: %s addr=%zx %s:%d\n", desc, mem, f, l);
}

// FIXME: Turn it off later. WTF is benign race?1?? Go talk to Hans Boehm.
void INTERFACE_ATTRIBUTE AnnotateBenignRaceSized(
    char *f, int l, uptr mem, uptr size, char *desc) {
  SCOPED_ANNOTATION(AnnotateBenignRaceSized);
  BenignRaceImpl(f, l, mem, size, desc);
}

void INTERFACE_ATTRIBUTE AnnotateBenignRace(
    char *f, int l, uptr mem, char *desc) {
  SCOPED_ANNOTATION(AnnotateBenignRace);
  BenignRaceImpl(f, l, mem, 1, desc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreReadsBegin(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreReadsBegin);
  ThreadIgnoreBegin(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreReadsEnd(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreReadsEnd);
  ThreadIgnoreEnd(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreWritesBegin(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreWritesBegin);
  ThreadIgnoreBegin(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreWritesEnd(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreWritesEnd);
  ThreadIgnoreEnd(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreSyncBegin(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreSyncBegin);
  ThreadIgnoreSyncBegin(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotateIgnoreSyncEnd(char *f, int l) {
  SCOPED_ANNOTATION(AnnotateIgnoreSyncEnd);
  ThreadIgnoreSyncEnd(thr, pc);
}

void INTERFACE_ATTRIBUTE AnnotatePublishMemoryRange(
    char *f, int l, uptr addr, uptr size) {
  SCOPED_ANNOTATION(AnnotatePublishMemoryRange);
}

void INTERFACE_ATTRIBUTE AnnotateUnpublishMemoryRange(
    char *f, int l, uptr addr, uptr size) {
  SCOPED_ANNOTATION(AnnotateUnpublishMemoryRange);
}

void INTERFACE_ATTRIBUTE AnnotateThreadName(
    char *f, int l, char *name) {
  SCOPED_ANNOTATION(AnnotateThreadName);
  ThreadSetName(thr, name);
}

// We deliberately omit the implementation of WTFAnnotateHappensBefore() and
// WTFAnnotateHappensAfter(). They are used by WebKit to annotate atomic
// operations, which ThreadSanitizer should already handle correctly.
void INTERFACE_ATTRIBUTE WTFAnnotateHappensBefore(char *f, int l, uptr addr) {
  SCOPED_ANNOTATION(AnnotateHappensBefore);
}

void INTERFACE_ATTRIBUTE WTFAnnotateHappensAfter(char *f, int l, uptr addr) {
  SCOPED_ANNOTATION(AnnotateHappensAfter);
}

void INTERFACE_ATTRIBUTE WTFAnnotateBenignRaceSized(
    char *f, int l, uptr mem, uptr sz, char *desc) {
  SCOPED_ANNOTATION(AnnotateBenignRaceSized);
  BenignRaceImpl(f, l, mem, sz, desc);
}

int INTERFACE_ATTRIBUTE RunningOnValgrind() {
  return flags()->running_on_valgrind;
}

double __attribute__((weak)) INTERFACE_ATTRIBUTE ValgrindSlowdown(void) {
  return 10.0;
}

const char INTERFACE_ATTRIBUTE* ThreadSanitizerQuery(const char *query) {
  if (internal_strcmp(query, "pure_happens_before") == 0)
    return "1";
  else
    return "0";
}

void INTERFACE_ATTRIBUTE
AnnotateMemoryIsInitialized(char *f, int l, uptr mem, uptr sz) {}
void INTERFACE_ATTRIBUTE
AnnotateMemoryIsUninitialized(char *f, int l, uptr mem, uptr sz) {}

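// The __tsan_mutex_* entry points below let custom synchronization primitives
// describe themselves to TSan. A minimal sketch of how a hypothetical
// user-level mutex might be annotated (illustration only; MyMutex and its
// lock_impl/unlock_impl are not part of this file):
//
//   MyMutex::MyMutex()  { __tsan_mutex_create(this, 0); }
//   MyMutex::~MyMutex() { __tsan_mutex_destroy(this, 0); }
//   void MyMutex::Lock() {
//     __tsan_mutex_pre_lock(this, 0);
//     lock_impl();                          // real acquisition
//     __tsan_mutex_post_lock(this, 0, 0);
//   }
//   void MyMutex::Unlock() {
//     __tsan_mutex_pre_unlock(this, 0);
//     unlock_impl();                        // real release
//     __tsan_mutex_post_unlock(this, 0);
//   }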
// Note: the parameter is called flagz, because flags is already taken
// by the global function that returns flags.
INTERFACE_ATTRIBUTE
void __tsan_mutex_create(void *m, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_create);
  MutexCreate(thr, pc, (uptr)m, flagz & MutexCreationFlagMask);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_destroy(void *m, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_destroy);
  MutexDestroy(thr, pc, (uptr)m, flagz);
}

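// pre_lock/post_lock (and the unlock/signal pairs below) bracket the mutex
// implementation itself with ignore regions, so memory accesses and
// synchronization performed inside the user's lock code do not produce
// reports or affect the happens-before state.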
INTERFACE_ATTRIBUTE
void __tsan_mutex_pre_lock(void *m, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_pre_lock);
  if (!(flagz & MutexFlagTryLock)) {
    if (flagz & MutexFlagReadLock)
      MutexPreReadLock(thr, pc, (uptr)m);
    else
      MutexPreLock(thr, pc, (uptr)m);
  }
  ThreadIgnoreBegin(thr, pc, /*save_stack=*/false);
  ThreadIgnoreSyncBegin(thr, pc, /*save_stack=*/false);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_post_lock(void *m, unsigned flagz, int rec) {
  SCOPED_ANNOTATION(__tsan_mutex_post_lock);
  ThreadIgnoreSyncEnd(thr, pc);
  ThreadIgnoreEnd(thr, pc);
  if (!(flagz & MutexFlagTryLockFailed)) {
    if (flagz & MutexFlagReadLock)
      MutexPostReadLock(thr, pc, (uptr)m, flagz);
    else
      MutexPostLock(thr, pc, (uptr)m, flagz, rec);
  }
}

INTERFACE_ATTRIBUTE
int __tsan_mutex_pre_unlock(void *m, unsigned flagz) {
  SCOPED_ANNOTATION_RET(__tsan_mutex_pre_unlock, 0);
  int ret = 0;
  if (flagz & MutexFlagReadLock) {
    CHECK(!(flagz & MutexFlagRecursiveUnlock));
    MutexReadUnlock(thr, pc, (uptr)m);
  } else {
    ret = MutexUnlock(thr, pc, (uptr)m, flagz);
  }
  ThreadIgnoreBegin(thr, pc, /*save_stack=*/false);
  ThreadIgnoreSyncBegin(thr, pc, /*save_stack=*/false);
  return ret;
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_post_unlock(void *m, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_post_unlock);
  ThreadIgnoreSyncEnd(thr, pc);
  ThreadIgnoreEnd(thr, pc);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_pre_signal(void *addr, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_pre_signal);
  ThreadIgnoreBegin(thr, pc, /*save_stack=*/false);
  ThreadIgnoreSyncBegin(thr, pc, /*save_stack=*/false);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_post_signal(void *addr, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_post_signal);
  ThreadIgnoreSyncEnd(thr, pc);
  ThreadIgnoreEnd(thr, pc);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_pre_divert(void *addr, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_pre_divert);
  // Exit from ignore region started in __tsan_mutex_pre_lock/unlock/signal.
  ThreadIgnoreSyncEnd(thr, pc);
  ThreadIgnoreEnd(thr, pc);
}

INTERFACE_ATTRIBUTE
void __tsan_mutex_post_divert(void *addr, unsigned flagz) {
  SCOPED_ANNOTATION(__tsan_mutex_post_divert);
  ThreadIgnoreBegin(thr, pc, /*save_stack=*/false);
  ThreadIgnoreSyncBegin(thr, pc, /*save_stack=*/false);
}
}  // extern "C"