/*
 * PROJECT:     ReactOS Kernel
 * LICENSE:     GPL - See COPYING in the top level directory
 * FILE:        ntoskrnl/include/internal/spinlock.h
 * PURPOSE:     Internal Inlined Functions for spinlocks, shared with HAL
 * PROGRAMMERS: Alex Ionescu (alex.ionescu@reactos.org)
 */

9*f30136bcSJérôme Gardou #if defined(_M_IX86)
10c2c66affSColin Finck VOID
11c2c66affSColin Finck NTAPI
12c2c66affSColin Finck Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags);
13*f30136bcSJérôme Gardou #endif
14c2c66affSColin Finck
15c2c66affSColin Finck //
16c2c66affSColin Finck // Spinlock Acquisition at IRQL >= DISPATCH_LEVEL
17c2c66affSColin Finck //
_Acquires_nonreentrant_lock_(SpinLock)18*f30136bcSJérôme Gardou _Acquires_nonreentrant_lock_(SpinLock)
19c2c66affSColin Finck FORCEINLINE
20c2c66affSColin Finck VOID
21*f30136bcSJérôme Gardou KxAcquireSpinLock(
22*f30136bcSJérôme Gardou #if defined(CONFIG_SMP) || DBG
23*f30136bcSJérôme Gardou _Inout_
24*f30136bcSJérôme Gardou #else
25*f30136bcSJérôme Gardou _Unreferenced_parameter_
26*f30136bcSJérôme Gardou #endif
27*f30136bcSJérôme Gardou PKSPIN_LOCK SpinLock)
28c2c66affSColin Finck {
29c2c66affSColin Finck #if DBG
30c2c66affSColin Finck /* Make sure that we don't own the lock already */
31c2c66affSColin Finck if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
32c2c66affSColin Finck {
33c2c66affSColin Finck /* We do, bugcheck! */
34c2c66affSColin Finck KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
35c2c66affSColin Finck }
36c2c66affSColin Finck #endif
37c2c66affSColin Finck
38*f30136bcSJérôme Gardou #ifdef CONFIG_SMP
39c2c66affSColin Finck /* Try to acquire the lock */
40c2c66affSColin Finck while (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
41c2c66affSColin Finck {
42c2c66affSColin Finck #if defined(_M_IX86) && DBG
43c2c66affSColin Finck /* On x86 debug builds, we use a much slower but useful routine */
44c2c66affSColin Finck Kii386SpinOnSpinLock(SpinLock, 5);
45c2c66affSColin Finck #else
46c2c66affSColin Finck /* It's locked... spin until it's unlocked */
47c2c66affSColin Finck while (*(volatile KSPIN_LOCK *)SpinLock & 1)
48c2c66affSColin Finck {
49c2c66affSColin Finck /* Yield and keep looping */
50c2c66affSColin Finck YieldProcessor();
51c2c66affSColin Finck }
52c2c66affSColin Finck #endif
53c2c66affSColin Finck }
54*f30136bcSJérôme Gardou #endif
55*f30136bcSJérôme Gardou
56*f30136bcSJérôme Gardou /* Add an explicit memory barrier to prevent the compiler from reordering
57*f30136bcSJérôme Gardou memory accesses across the borders of spinlocks */
58*f30136bcSJérôme Gardou KeMemoryBarrierWithoutFence();
59*f30136bcSJérôme Gardou
60c2c66affSColin Finck #if DBG
61c2c66affSColin Finck /* On debug builds, we OR in the KTHREAD */
62c2c66affSColin Finck *SpinLock = (KSPIN_LOCK)KeGetCurrentThread() | 1;
63c2c66affSColin Finck #endif
64c2c66affSColin Finck }
65c2c66affSColin Finck
66c2c66affSColin Finck //
67c2c66affSColin Finck // Spinlock Release at IRQL >= DISPATCH_LEVEL
68c2c66affSColin Finck //
_Releases_nonreentrant_lock_(SpinLock)69*f30136bcSJérôme Gardou _Releases_nonreentrant_lock_(SpinLock)
70c2c66affSColin Finck FORCEINLINE
71c2c66affSColin Finck VOID
72*f30136bcSJérôme Gardou KxReleaseSpinLock(
73*f30136bcSJérôme Gardou #if defined(CONFIG_SMP) || DBG
74*f30136bcSJérôme Gardou _Inout_
75*f30136bcSJérôme Gardou #else
76*f30136bcSJérôme Gardou _Unreferenced_parameter_
77*f30136bcSJérôme Gardou #endif
78*f30136bcSJérôme Gardou PKSPIN_LOCK SpinLock)
79c2c66affSColin Finck {
80c2c66affSColin Finck #if DBG
81c2c66affSColin Finck /* Make sure that the threads match */
82c2c66affSColin Finck if (((KSPIN_LOCK)KeGetCurrentThread() | 1) != *SpinLock)
83c2c66affSColin Finck {
84c2c66affSColin Finck /* They don't, bugcheck */
85c2c66affSColin Finck KeBugCheckEx(SPIN_LOCK_NOT_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
86c2c66affSColin Finck }
87c2c66affSColin Finck #endif
88*f30136bcSJérôme Gardou
89*f30136bcSJérôme Gardou #if defined(CONFIG_SMP) || DBG
90c2c66affSColin Finck /* Clear the lock */
9162f62da8STimo Kreuzer #ifdef _WIN64
9262f62da8STimo Kreuzer InterlockedAnd64((PLONG64)SpinLock, 0);
9362f62da8STimo Kreuzer #else
94c2c66affSColin Finck InterlockedAnd((PLONG)SpinLock, 0);
9562f62da8STimo Kreuzer #endif
96c2c66affSColin Finck #endif
97*f30136bcSJérôme Gardou
98*f30136bcSJérôme Gardou /* Add an explicit memory barrier to prevent the compiler from reordering
99*f30136bcSJérôme Gardou memory accesses across the borders of spinlocks */
100*f30136bcSJérôme Gardou KeMemoryBarrierWithoutFence();
101*f30136bcSJérôme Gardou }
102