/*
 * PROJECT:         ReactOS Kernel
 * LICENSE:         GPL - See COPYING in the top level directory
 * FILE:            ntoskrnl/include/internal/spinlock.h
 * PURPOSE:         Internal Inlined Functions for spinlocks, shared with HAL
 * PROGRAMMERS:     Alex Ionescu (alex.ionescu@reactos.org)
 */

#if defined(_M_IX86)
VOID
NTAPI
Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
#endif

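//
// Lock representation used by the inlines below: bit 0 of the KSPIN_LOCK is
// the lock bit; on debug builds the remaining bits hold the owning KTHREAD so
// ownership violations can be detected. Kii386SpinOnSpinLock above is the x86
// debug-only spin helper; based on how it is called here, it is assumed to
// busy-wait on the lock with extra diagnostics, and the meaning of its Flags
// argument is defined by its implementation, not by this header.
//
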
//
// Spinlock Acquisition at IRQL >= DISPATCH_LEVEL
//
_Acquires_nonreentrant_lock_(SpinLock)
FORCEINLINE
VOID
KxAcquireSpinLock(
#if defined(CONFIG_SMP) || DBG
    _Inout_
#else
    _Unreferenced_parameter_
#endif
    PKSPIN_LOCK SpinLock)
{
#if DBG
    /* Make sure we don't already own the lock; debug builds store the
       owning thread (with the lock bit set) in the lock itself */
    if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
    {
        /* We do, bugcheck! */
        KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
    }
#endif

#ifdef CONFIG_SMP
    /* Try to acquire the lock */
    while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
    {
#if defined(_M_IX86) && DBG
        /* On x86 debug builds, we use a much slower but useful routine */
        Kii386SpinOnSpinLock(SpinLock, 5);
#else
        /* It's locked... spin until it's unlocked */
        while (*(volatile KSPIN_LOCK *)SpinLock & 1)
        {
            /* Yield and keep looping */
            YieldProcessor();
        }
#endif
    }
#endif

    /* Add an explicit compiler barrier so the compiler cannot reorder
       memory accesses across the spinlock boundary */
    KeMemoryBarrierWithoutFence();

#if DBG
    /* On debug builds, store the current thread (with the lock bit set)
       so that ownership can be verified on release */
    *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
#endif
}

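//
// Usage sketch (illustrative only, excluded from the build): shows how a
// wrapper that is already running at IRQL >= DISPATCH_LEVEL might use the
// acquire inline. The routine name is hypothetical.
//
#if 0
FORCEINLINE
VOID
ExampleAcquireAtDpcLevel(_Inout_ PKSPIN_LOCK SpinLock)
{
    /* IRQL is assumed to already be >= DISPATCH_LEVEL here */
    KxAcquireSpinLock(SpinLock);
}
#endif
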
//
// Spinlock Release at IRQL >= DISPATCH_LEVEL
//
_Releases_nonreentrant_lock_(SpinLock)
FORCEINLINE
VOID
KxReleaseSpinLock(
#if defined(CONFIG_SMP) || DBG
    _Inout_
#else
    _Unreferenced_parameter_
#endif
    PKSPIN_LOCK SpinLock)
{
#if DBG
    /* Make sure the calling thread is the one that owns the lock */
    if (((KSPIN_LOCK)KeGetCurrentThread() | 1) != *SpinLock)
    {
        /* It isn't, bugcheck */
        KeBugCheckEx(SPIN_LOCK_NOT_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
    }
#endif

#if defined(CONFIG_SMP) || DBG
    /* Clear the lock */
#ifdef _WIN64
    InterlockedAnd64((PLONG64)SpinLock, 0);
#else
    InterlockedAnd((PLONG)SpinLock, 0);
#endif
#endif

    /* Add an explicit compiler barrier so the compiler cannot reorder
       memory accesses across the spinlock boundary */
    KeMemoryBarrierWithoutFence();
}
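
//
// Usage sketch (illustrative only, excluded from the build): acquire and
// release are expected to be paired on the same lock by the same thread,
// all at IRQL >= DISPATCH_LEVEL. The routine name is hypothetical.
//
#if 0
FORCEINLINE
VOID
ExampleUpdateUnderLock(_Inout_ PKSPIN_LOCK SpinLock)
{
    /* IRQL is assumed to already be >= DISPATCH_LEVEL here */
    KxAcquireSpinLock(SpinLock);

    /* ... access the data protected by the lock ... */

    KxReleaseSpinLock(SpinLock);
}
#endif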