/*
 * PROJECT:         ReactOS Kernel
 * LICENSE:         GPL - See COPYING in the top level directory
 * FILE:            ntoskrnl/ke/spinlock.c
 * PURPOSE:         Spinlock and Queued Spinlock Support
 * PROGRAMMERS:     Alex Ionescu (alex.ionescu@reactos.org)
 */

/* INCLUDES ******************************************************************/

#include <ntoskrnl.h>
#define NDEBUG
#include <debug.h>

#define LQ_WAIT     1
#define LQ_OWN      2

/* PRIVATE FUNCTIONS *********************************************************/

#if 0
//
// FIXME: The queued spinlock routines are broken.
//

VOID
FASTCALL
KeAcquireQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    PKSPIN_LOCK_QUEUE Prev;

    /* Set the new lock */
    Prev = (PKSPIN_LOCK_QUEUE)
           InterlockedExchange((PLONG)LockHandle->Next,
                               (LONG)LockHandle);
    if (!Prev)
    {
        /* There was nothing there before. We now own it */
        *LockHandle->Lock |= LQ_OWN;
        return;
    }

    /* Set the wait flag */
    *LockHandle->Lock |= LQ_WAIT;

    /* Link us */
    Prev->Next = (PKSPIN_LOCK_QUEUE)LockHandle;

    /* Loop and wait */
    while (*LockHandle->Lock & LQ_WAIT)
        YieldProcessor();
#endif
}

VOID
FASTCALL
KeReleaseQueuedSpinLockFromDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    KSPIN_LOCK LockVal;
    PKSPIN_LOCK_QUEUE Waiter;

    /* Remove own and wait flags */
    *LockHandle->Lock &= ~(LQ_OWN | LQ_WAIT);
    LockVal = *LockHandle->Lock;

    /* Check if we already own it */
    if (LockVal == (KSPIN_LOCK)LockHandle)
    {
        /* Disown it */
        LockVal = (KSPIN_LOCK)
                  InterlockedCompareExchangePointer(LockHandle->Lock,
                                                    NULL,
                                                    LockHandle);
    }
    if (LockVal == (KSPIN_LOCK)LockHandle) return;

    /* Need to wait for it */
    Waiter = LockHandle->Next;
    while (!Waiter)
    {
        YieldProcessor();
        Waiter = LockHandle->Next;
    }

    /* It's gone */
    *(ULONG_PTR*)&Waiter->Lock ^= (LQ_OWN | LQ_WAIT);
    LockHandle->Next = NULL;
#endif
}
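
/*
 * NOTE: The disabled routines above attempt a queued spinlock in the style
 * of Windows' queued spinlocks: each waiter spins on its own
 * KSPIN_LOCK_QUEUE entry, waiters are served in FIFO order, and the low bits
 * of the lock value (LQ_OWN/LQ_WAIT) track ownership and waiting. This is
 * only a description of the intent; as the FIXME above states, the
 * implementation is broken, so the build falls back to the plain-spinlock
 * hack below.
 */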

#else
//
// HACK: Hacked to work like normal spinlocks
//

_IRQL_requires_min_(DISPATCH_LEVEL)
_Acquires_nonreentrant_lock_(*LockHandle->Lock)
_Acquires_exclusive_lock_(*LockHandle->Lock)
VOID
FASTCALL
KeAcquireQueuedSpinLockAtDpcLevel(_Inout_ PKSPIN_LOCK_QUEUE LockHandle)
{
#if defined(CONFIG_SMP) || DBG
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }
#endif

    /* Do the inlined function */
    KxAcquireSpinLock(LockHandle->Lock);
}

_IRQL_requires_min_(DISPATCH_LEVEL)
_Releases_nonreentrant_lock_(*LockHandle->Lock)
_Releases_exclusive_lock_(*LockHandle->Lock)
VOID
FASTCALL
KeReleaseQueuedSpinLockFromDpcLevel(_Inout_ PKSPIN_LOCK_QUEUE LockHandle)
{
#if defined(CONFIG_SMP) || DBG
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }
#endif

    /* Do the inlined function */
    KxReleaseSpinLock(LockHandle->Lock);
}

#endif

/* PUBLIC FUNCTIONS **********************************************************/

/*
 * @implemented
 */
KIRQL
NTAPI
KeAcquireInterruptSpinLock(IN PKINTERRUPT Interrupt)
{
    KIRQL OldIrql;

    /* Raise IRQL */
    KeRaiseIrql(Interrupt->SynchronizeIrql, &OldIrql);

    /* Acquire spinlock on MP */
    KeAcquireSpinLockAtDpcLevel(Interrupt->ActualLock);
    return OldIrql;
}

/*
 * @implemented
 */
VOID
NTAPI
KeReleaseInterruptSpinLock(IN PKINTERRUPT Interrupt,
                           IN KIRQL OldIrql)
{
    /* Release lock on MP */
    KeReleaseSpinLockFromDpcLevel(Interrupt->ActualLock);

    /* Lower IRQL */
    KeLowerIrql(OldIrql);
}
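
/*
 * Illustrative sketch (not part of this file): how a driver thread would
 * typically use the two routines above to touch data shared with its ISR.
 * The device extension layout and the shared counter are hypothetical.
 *
 *     typedef struct _MY_DEVICE_EXTENSION
 *     {
 *         PKINTERRUPT Interrupt;   // from IoConnectInterrupt
 *         ULONG IsrSharedCount;    // data also touched by the ISR
 *     } MY_DEVICE_EXTENSION, *PMY_DEVICE_EXTENSION;
 *
 *     VOID
 *     MyTouchIsrSharedData(PMY_DEVICE_EXTENSION Ext)
 *     {
 *         // Raises to SynchronizeIrql and takes the interrupt's lock,
 *         // so the ISR cannot run concurrently on another processor.
 *         KIRQL OldIrql = KeAcquireInterruptSpinLock(Ext->Interrupt);
 *         Ext->IsrSharedCount++;
 *         KeReleaseInterruptSpinLock(Ext->Interrupt, OldIrql);
 *     }
 */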

/*
 * @implemented
 */
VOID
NTAPI
_KeInitializeSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Clear it */
    *SpinLock = 0;
}
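
/*
 * Illustrative sketch (not part of this file): the usual lifetime of a plain
 * spinlock from a driver's point of view; initialization simply zeroes the
 * lock, as the routine above does. The lock variable and caller are
 * hypothetical.
 *
 *     KSPIN_LOCK MyLock;                      // must live in non-paged storage
 *     KeInitializeSpinLock(&MyLock);
 *
 *     // Later, from code running at IRQL <= DISPATCH_LEVEL:
 *     KIRQL OldIrql;
 *     KeAcquireSpinLock(&MyLock, &OldIrql);   // raises to DISPATCH_LEVEL
 *     // ... touch the protected data ...
 *     KeReleaseSpinLock(&MyLock, OldIrql);    // restores the previous IRQL
 */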

/*
 * @implemented
 */
#undef KeAcquireSpinLockAtDpcLevel
VOID
NTAPI
KeAcquireSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
#undef KeReleaseSpinLockFromDpcLevel
VOID
NTAPI
KeReleaseSpinLockFromDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KefAcquireSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KefReleaseSpinLockFromDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KiAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KiReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
BOOLEAN
FASTCALL
KeTryToAcquireSpinLockAtDpcLevel(IN OUT PKSPIN_LOCK SpinLock)
{
#if DBG
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Make sure that we don't own the lock already */
    if (((KSPIN_LOCK)KeGetCurrentThread() | 1) == *SpinLock)
    {
        /* We do, bugcheck! */
        KeBugCheckEx(SPIN_LOCK_ALREADY_OWNED, (ULONG_PTR)SpinLock, 0, 0, 0);
    }
#endif

#ifdef CONFIG_SMP
    /* Check if it's already acquired */
    if (!(*SpinLock))
    {
        /* Try to acquire it */
        if (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
        {
            /* Someone else acquired it */
            return FALSE;
        }
    }
    else
    {
        /* It was already acquired */
        return FALSE;
    }
#endif

#if DBG
    /* On debug builds, we OR in the KTHREAD */
    *SpinLock = (ULONG_PTR)KeGetCurrentThread() | 1;
#endif

    /* All is well, return TRUE */
    return TRUE;
}
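
/*
 * Illustrative sketch (not part of this file): how code already running at
 * DISPATCH_LEVEL (for example a DPC routine) might use the try-acquire path
 * above to avoid spinning. The lock and the work being done are hypothetical.
 *
 *     VOID
 *     MyDpcWork(PKSPIN_LOCK StateLock)
 *     {
 *         if (KeTryToAcquireSpinLockAtDpcLevel(StateLock))
 *         {
 *             // Got the lock without spinning; update the shared state.
 *             // ...
 *             KeReleaseSpinLockFromDpcLevel(StateLock);
 *         }
 *         else
 *         {
 *             // Lock is busy; defer the update rather than spin here.
 *             // ...
 *         }
 *     }
 */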

/*
 * @implemented
 */
VOID
FASTCALL
KeAcquireInStackQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock,
                                         IN PKLOCK_QUEUE_HANDLE LockHandle)
{
    /* Set it up properly */
    LockHandle->LockQueue.Next = NULL;
    LockHandle->LockQueue.Lock = SpinLock;
#ifdef CONFIG_SMP
#if 0
    KeAcquireQueuedSpinLockAtDpcLevel(LockHandle->LockQueue.Next);
#else
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->LockQueue.Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }
#endif
#endif

    /* Acquire the lock */
    KxAcquireSpinLock(LockHandle->LockQueue.Lock); // HACK
}

/*
 * @implemented
 */
VOID
FASTCALL
KeReleaseInStackQueuedSpinLockFromDpcLevel(IN PKLOCK_QUEUE_HANDLE LockHandle)
{
#ifdef CONFIG_SMP
#if 0
    /* Call the internal function */
    KeReleaseQueuedSpinLockFromDpcLevel(LockHandle->LockQueue.Next);
#else
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->LockQueue.Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }
#endif
#endif

    /* Release the lock */
    KxReleaseSpinLock(LockHandle->LockQueue.Lock); // HACK
}
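
/*
 * Illustrative sketch (not part of this file): the in-stack queued pattern
 * as a caller sees it. The KLOCK_QUEUE_HANDLE lives on the caller's stack,
 * so no per-lock queue storage has to be allocated up front. The lock
 * variable and the caller are hypothetical.
 *
 *     KSPIN_LOCK ListLock;   // initialized elsewhere with KeInitializeSpinLock
 *
 *     VOID
 *     MyUpdateList(VOID)
 *     {
 *         KLOCK_QUEUE_HANDLE Handle;
 *
 *         KeAcquireInStackQueuedSpinLock(&ListLock, &Handle);
 *         // ... touch the protected list at DISPATCH_LEVEL ...
 *         KeReleaseInStackQueuedSpinLock(&Handle);
 *     }
 */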

/*
 * @unimplemented
 */
KIRQL
FASTCALL
KeAcquireSpinLockForDpc(IN PKSPIN_LOCK SpinLock)
{
    UNIMPLEMENTED;
    return 0;
}

/*
 * @unimplemented
 */
VOID
FASTCALL
KeReleaseSpinLockForDpc(IN PKSPIN_LOCK SpinLock,
                        IN KIRQL OldIrql)
{
    UNIMPLEMENTED;
}
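
/*
 * NOTE: A possible implementation of the two stubs above would mirror the
 * ForDpc in-stack queued helpers below: remember the caller's IRQL and only
 * raise to DISPATCH_LEVEL when the caller is below it. This is a hedged
 * sketch, not the ReactOS implementation, and is kept disabled.
 */
#if 0
KIRQL
FASTCALL
KeAcquireSpinLockForDpc(IN PKSPIN_LOCK SpinLock)
{
    KIRQL OldIrql;

    /* Remember where we started; raise only if we are below DISPATCH_LEVEL */
    OldIrql = KeGetCurrentIrql();
    if (OldIrql < DISPATCH_LEVEL) KeRaiseIrql(DISPATCH_LEVEL, &OldIrql);

    /* Take the lock now that we are at DISPATCH_LEVEL */
    KeAcquireSpinLockAtDpcLevel(SpinLock);
    return OldIrql;
}

VOID
FASTCALL
KeReleaseSpinLockForDpc(IN PKSPIN_LOCK SpinLock,
                        IN KIRQL OldIrql)
{
    /* Drop the lock, then return to the caller's original IRQL if we raised */
    KeReleaseSpinLockFromDpcLevel(SpinLock);
    if (OldIrql < DISPATCH_LEVEL) KeLowerIrql(OldIrql);
}
#endif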

/*
 * @implemented
 */
VOID
FASTCALL
KeAcquireInStackQueuedSpinLockForDpc(IN PKSPIN_LOCK SpinLock,
                                     IN PKLOCK_QUEUE_HANDLE LockHandle)
{
    LockHandle->OldIrql = KeGetCurrentIrql();
    if (LockHandle->OldIrql >= DISPATCH_LEVEL)
        KeAcquireInStackQueuedSpinLockAtDpcLevel(SpinLock, LockHandle);
    else
        KeAcquireInStackQueuedSpinLock(SpinLock, LockHandle);
}

/*
 * @implemented
 */
VOID
FASTCALL
KeReleaseInStackQueuedSpinLockForDpc(IN PKLOCK_QUEUE_HANDLE LockHandle)
{
    if (LockHandle->OldIrql >= DISPATCH_LEVEL)
        KeReleaseInStackQueuedSpinLockFromDpcLevel(LockHandle);
    else
        KeReleaseInStackQueuedSpinLock(LockHandle);
}

/*
 * @implemented
 */
BOOLEAN
FASTCALL
KeTestSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Test this spinlock */
    if (*SpinLock)
    {
        /* Spinlock is busy, yield execution */
        YieldProcessor();

        /* Return busy flag */
        return FALSE;
    }

    /* Spinlock appears to be free */
    return TRUE;
}

#ifdef _M_IX86
VOID
NTAPI
Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags)
{
    // FIXME: Handle flags
    UNREFERENCED_PARAMETER(Flags);

    /* Spin until it's unlocked */
    while (*(volatile KSPIN_LOCK *)SpinLock & 1)
    {
        // FIXME: Check for timeout

        /* Yield and keep looping */
        YieldProcessor();
    }
}
#endif
509