1 /*
2 * PROJECT:         ReactOS Kernel
3 * LICENSE:         GPL - See COPYING in the top level directory
4 * FILE:            ntoskrnl/include/internal/spinlock.h
5 * PURPOSE:         Internal Inlined Functions for spinlocks, shared with HAL
6 * PROGRAMMERS:     Alex Ionescu (alex.ionescu@reactos.org)
7 */
8 
/* Slow diagnostic spin routine used by x86 DBG builds while waiting on a
   contended lock (called from KxAcquireSpinLock below); Flags selects its
   behavior — NOTE(review): flag value semantics defined at the implementation */
VOID
NTAPI
Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
12 
13 #ifndef CONFIG_SMP
14 
15 //
16 // Spinlock Acquire at IRQL >= DISPATCH_LEVEL
17 //
FORCEINLINE
VOID
KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* On UP builds, spinlocks don't exist at IRQL >= DISPATCH: with only one
       processor and preemption already disabled, no other context can contend,
       so no atomic operation is needed */
    UNREFERENCED_PARAMETER(SpinLock);

    /* Add an explicit memory barrier to prevent the compiler from reordering
       memory accesses across the borders of spinlocks (compiler-only barrier;
       no CPU fence instruction is emitted) */
    KeMemoryBarrierWithoutFence();
}
29 
30 //
31 // Spinlock Release at IRQL >= DISPATCH_LEVEL
32 //
FORCEINLINE
VOID
KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* On UP builds, spinlocks don't exist at IRQL >= DISPATCH: the lock was
       never actually taken in KxAcquireSpinLock, so there is nothing to clear */
    UNREFERENCED_PARAMETER(SpinLock);

    /* Add an explicit memory barrier to prevent the compiler from reordering
       memory accesses across the borders of spinlocks (compiler-only barrier;
       no CPU fence instruction is emitted) */
    KeMemoryBarrierWithoutFence();
}
44 
45 #else
46 
47 //
48 // Spinlock Acquisition at IRQL >= DISPATCH_LEVEL
49 //
50 FORCEINLINE
51 VOID
52 KxAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
53 {
54 #if DBG
55     /* Make sure that we don't own the lock already */
56     if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
57     {
58         /* We do, bugcheck! */
59         KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
60     }
61 #endif
62 
63     /* Try to acquire the lock */
64     while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
65     {
66 #if defined(_M_IX86) && DBG
67         /* On x86 debug builds, we use a much slower but useful routine */
68         Kii386SpinOnSpinLock(SpinLock, 5);
69 #else
70         /* It's locked... spin until it's unlocked */
71         while (*(volatile KSPIN_LOCK *)SpinLock & 1)
72         {
73                 /* Yield and keep looping */
74                 YieldProcessor();
75         }
76 #endif
77     }
78 #if DBG
79     /* On debug builds, we OR in the KTHREAD */
80     *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
81 #endif
82 }
83 
84 //
85 // Spinlock Release at IRQL >= DISPATCH_LEVEL
86 //
87 FORCEINLINE
88 VOID
89 KxReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
90 {
91 #if DBG
92     /* Make sure that the threads match */
93     if (((KSPIN_LOCK)KeGetCurrentThread() | 1) != *SpinLock)
94     {
95         /* They don't, bugcheck */
96         KeBugCheckEx(SPIN_LOCK_NOT_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
97     }
98 #endif
99     /* Clear the lock */
100 #ifdef _WIN64
101     InterlockedAnd64((PLONG64)SpinLock, 0);
102 #else
103     InterlockedAnd((PLONG)SpinLock, 0);
104 #endif
105 }
106 
107 #endif
108