/*
 * PROJECT:     ReactOS Kernel
 * LICENSE:     GPL - See COPYING in the top level directory
 * FILE:        ntoskrnl/ke/spinlock.c
 * PURPOSE:     Spinlock and Queued Spinlock Support
 * PROGRAMMERS: Alex Ionescu (alex.ionescu@reactos.org)
 */

/* INCLUDES ******************************************************************/

#include <ntoskrnl.h>
#define NDEBUG
#include <debug.h>

#define LQ_WAIT 1
#define LQ_OWN  2

/* PRIVATE FUNCTIONS *********************************************************/

#if 0
//
// FIXME: The queued spinlock routines are broken: the acquire path
// exchanges through the (never initialized) Next field instead of the
// lock's tail pointer, the pointer casts through LONG truncate on 64-bit,
// and the LQ_* flags are applied inconsistently (sometimes to the spinlock
// value, sometimes to the queue entry's Lock field). See the MCS-style
// sketch after this block for the intended hand-off scheme.
//

VOID
FASTCALL
KeAcquireQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    PKSPIN_LOCK_QUEUE Prev;

    /* Set the new lock */
    Prev = (PKSPIN_LOCK_QUEUE)
           InterlockedExchange((PLONG)LockHandle->Next,
                               (LONG)LockHandle);
    if (!Prev)
    {
        /* There was nothing there before. We now own it */
        *LockHandle->Lock |= LQ_OWN;
        return;
    }

    /* Set the wait flag */
    *LockHandle->Lock |= LQ_WAIT;

    /* Link us */
    Prev->Next = (PKSPIN_LOCK_QUEUE)LockHandle;

    /* Loop and wait */
    while (*LockHandle->Lock & LQ_WAIT)
        YieldProcessor();
#endif
}

VOID
FASTCALL
KeReleaseQueuedSpinLockFromDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    KSPIN_LOCK LockVal;
    PKSPIN_LOCK_QUEUE Waiter;

    /* Remove own and wait flags */
    *LockHandle->Lock &= ~(LQ_OWN | LQ_WAIT);
    LockVal = *LockHandle->Lock;

    /* Check if we already own it */
    if (LockVal == (KSPIN_LOCK)LockHandle)
    {
        /* Disown it */
        LockVal = (KSPIN_LOCK)
                  InterlockedCompareExchangePointer(LockHandle->Lock,
                                                    NULL,
                                                    LockHandle);
    }
    if (LockVal == (KSPIN_LOCK)LockHandle) return;

    /* Need to wait for it */
    Waiter = LockHandle->Next;
    while (!Waiter)
    {
        YieldProcessor();
        Waiter = LockHandle->Next;
    }

    /* It's gone */
    *(ULONG_PTR*)&Waiter->Lock ^= (LQ_OWN | LQ_WAIT);
    LockHandle->Next = NULL;
#endif
}

#else
//
// HACK: Hacked to work like normal spinlocks
//

VOID
FASTCALL
KeAcquireQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxAcquireSpinLock(LockHandle->Lock);
#endif
}

VOID
FASTCALL
KeReleaseQueuedSpinLockFromDpcLevel(IN PKSPIN_LOCK_QUEUE LockHandle)
{
#ifdef CONFIG_SMP
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxReleaseSpinLock(LockHandle->Lock);
#endif
}

#endif
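
/*
 * For reference, a minimal sketch of the MCS-style hand-off that the queued
 * routines above are meant to implement, written with C11 atomics rather
 * than the KSPIN_LOCK_QUEUE layout used above (where the release path flips
 * LQ_OWN/LQ_WAIT in the low bits of the queue entry's Lock field). The
 * MCS_* names are illustrative only and nothing here is built.
 */
#if 0
#include <stdatomic.h>
#include <stdbool.h>

typedef struct _MCS_NODE
{
    struct _MCS_NODE *_Atomic Next;   /* Successor in the wait queue */
    atomic_bool Wait;                 /* TRUE while this CPU must spin */
} MCS_NODE;

typedef struct _MCS_LOCK
{
    MCS_NODE *_Atomic Tail;           /* Last waiter, or NULL if free */
} MCS_LOCK;

static void McsAcquire(MCS_LOCK *Lock, MCS_NODE *Self)
{
    MCS_NODE *Prev;

    /* Prepare our queue node and swap ourselves in as the new tail */
    atomic_store(&Self->Next, NULL);
    atomic_store(&Self->Wait, true);
    Prev = atomic_exchange(&Lock->Tail, Self);
    if (Prev == NULL) return;         /* Queue was empty: we own the lock */

    /* Link behind the previous tail and spin on our own flag only */
    atomic_store(&Prev->Next, Self);
    while (atomic_load(&Self->Wait))
        ;                             /* YieldProcessor() would go here */
}

static void McsRelease(MCS_LOCK *Lock, MCS_NODE *Self)
{
    MCS_NODE *Next = atomic_load(&Self->Next);

    if (Next == NULL)
    {
        /* No visible successor: try to mark the lock free */
        MCS_NODE *Expected = Self;
        if (atomic_compare_exchange_strong(&Lock->Tail, &Expected, NULL))
            return;

        /* A successor is mid-enqueue; wait for it to link itself */
        while ((Next = atomic_load(&Self->Next)) == NULL)
            ;
    }

    /* Hand the lock directly to the next waiter */
    atomic_store(&Next->Wait, false);
}
#endif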

/* PUBLIC FUNCTIONS **********************************************************/

/*
 * @implemented
 */
KIRQL
NTAPI
KeAcquireInterruptSpinLock(IN PKINTERRUPT Interrupt)
{
    KIRQL OldIrql;

    /* Raise IRQL */
    KeRaiseIrql(Interrupt->SynchronizeIrql, &OldIrql);

    /* Acquire spinlock on MP */
    KeAcquireSpinLockAtDpcLevel(Interrupt->ActualLock);
    return OldIrql;
}

/*
 * @implemented
 */
VOID
NTAPI
KeReleaseInterruptSpinLock(IN PKINTERRUPT Interrupt,
                           IN KIRQL OldIrql)
{
    /* Release lock on MP */
    KeReleaseSpinLockFromDpcLevel(Interrupt->ActualLock);

    /* Lower IRQL */
    KeLowerIrql(OldIrql);
}

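/*
 * Usage sketch (not built): a driver-side routine that must touch state
 * shared with its ISR takes the interrupt spinlock around the access,
 * which raises to the interrupt's SynchronizeIrql first. The shared
 * counter parameter is purely hypothetical.
 */
#if 0
VOID
ExampleTouchIsrSharedState(IN PKINTERRUPT Interrupt,
                           IN PULONG SharedCounter)    /* hypothetical */
{
    KIRQL OldIrql;

    /* Raise IRQL to SynchronizeIrql and take the interrupt's lock */
    OldIrql = KeAcquireInterruptSpinLock(Interrupt);

    /* Safe to touch data the ISR also uses */
    (*SharedCounter)++;

    /* Drop the lock and restore the previous IRQL */
    KeReleaseInterruptSpinLock(Interrupt, OldIrql);
}
#endif
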
/*
 * @implemented
 */
VOID
NTAPI
_KeInitializeSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Clear it */
    *SpinLock = 0;
}

/*
 * @implemented
 */
#undef KeAcquireSpinLockAtDpcLevel
VOID
NTAPI
KeAcquireSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
#undef KeReleaseSpinLockFromDpcLevel
VOID
NTAPI
KeReleaseSpinLockFromDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KefAcquireSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KefReleaseSpinLockFromDpcLevel(IN PKSPIN_LOCK SpinLock)
{
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)SpinLock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KiAcquireSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Do the inlined function */
    KxAcquireSpinLock(SpinLock);
}

/*
 * @implemented
 */
VOID
FASTCALL
KiReleaseSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Do the inlined function */
    KxReleaseSpinLock(SpinLock);
}

/*
 * @implemented
 */
BOOLEAN
FASTCALL
KeTryToAcquireSpinLockAtDpcLevel(IN OUT PKSPIN_LOCK SpinLock)
{
#ifdef CONFIG_SMP
    /* Check if it's already acquired */
    if (!(*SpinLock))
    {
        /* Try to acquire it */
        if (InterlockedBitTestAndSet((PLONG)SpinLock, 0))
        {
            /* Someone else acquired it */
            return FALSE;
        }
    }
    else
    {
        /* It was already acquired */
        return FALSE;
    }

#if DBG
    /* On debug builds, we OR in the KTHREAD */
    *SpinLock = (ULONG_PTR)KeGetCurrentThread() | 1;
#endif
#endif

    /* All is well, return TRUE */
    return TRUE;
}

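/*
 * Usage sketch (not built): a DPC-level path that must not spin can try
 * the lock once and defer its work when the lock is busy. The deferral
 * itself is left as a placeholder.
 */
#if 0
VOID
ExampleTryLockedWork(IN PKSPIN_LOCK SpinLock)
{
    /* Caller is already at DISPATCH_LEVEL (e.g. inside a DPC routine) */
    if (KeTryToAcquireSpinLockAtDpcLevel(SpinLock))
    {
        /* Got the lock without spinning -- touch the protected state */
        /* ... */

        KeReleaseSpinLockFromDpcLevel(SpinLock);
    }
    else
    {
        /* Lock busy: requeue the DPC or defer the work instead */
    }
}
#endif
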
/*
 * @implemented
 */
VOID
FASTCALL
KeAcquireInStackQueuedSpinLockAtDpcLevel(IN PKSPIN_LOCK SpinLock,
                                         IN PKLOCK_QUEUE_HANDLE LockHandle)
{
#ifdef CONFIG_SMP
    /* Set it up properly */
    LockHandle->LockQueue.Next = NULL;
    LockHandle->LockQueue.Lock = SpinLock;
#if 0
    KeAcquireQueuedSpinLockAtDpcLevel(LockHandle->LockQueue.Next);
#else
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->LockQueue.Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Acquire the lock */
    KxAcquireSpinLock(LockHandle->LockQueue.Lock); // HACK
#endif
#endif
}

/*
 * @implemented
 */
VOID
FASTCALL
KeReleaseInStackQueuedSpinLockFromDpcLevel(IN PKLOCK_QUEUE_HANDLE LockHandle)
{
#ifdef CONFIG_SMP
#if 0
    /* Call the internal function */
    KeReleaseQueuedSpinLockFromDpcLevel(LockHandle->LockQueue.Next);
#else
    /* Make sure we are at DPC or above! */
    if (KeGetCurrentIrql() < DISPATCH_LEVEL)
    {
        /* We aren't -- bugcheck */
        KeBugCheckEx(IRQL_NOT_GREATER_OR_EQUAL,
                     (ULONG_PTR)LockHandle->LockQueue.Lock,
                     KeGetCurrentIrql(),
                     0,
                     0);
    }

    /* Release the lock */
    KxReleaseSpinLock(LockHandle->LockQueue.Lock); // HACK
#endif
#endif
}

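/*
 * Usage sketch (not built): the in-stack queued variants take a
 * KLOCK_QUEUE_HANDLE that lives on the caller's stack, so no global
 * lock-queue slot is needed. The shared list parameters below are
 * purely hypothetical.
 */
#if 0
VOID
ExampleQueuedLockedInsert(IN PKSPIN_LOCK SpinLock,
                          IN PLIST_ENTRY SharedList,    /* hypothetical */
                          IN PLIST_ENTRY NewEntry)      /* hypothetical */
{
    KLOCK_QUEUE_HANDLE LockHandle;

    /* Caller is already at DISPATCH_LEVEL */
    KeAcquireInStackQueuedSpinLockAtDpcLevel(SpinLock, &LockHandle);

    /* Protected update of the shared list */
    InsertTailList(SharedList, NewEntry);

    KeReleaseInStackQueuedSpinLockFromDpcLevel(&LockHandle);
}
#endif
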
/*
 * @unimplemented
 */
KIRQL
FASTCALL
KeAcquireSpinLockForDpc(IN PKSPIN_LOCK SpinLock)
{
    UNIMPLEMENTED;
    return 0;
}

/*
 * @unimplemented
 */
VOID
FASTCALL
KeReleaseSpinLockForDpc(IN PKSPIN_LOCK SpinLock,
                        IN KIRQL OldIrql)
{
    UNIMPLEMENTED;
}

/*
 * @unimplemented
 */
KIRQL
FASTCALL
KeAcquireInStackQueuedSpinLockForDpc(IN PKSPIN_LOCK SpinLock,
                                     IN PKLOCK_QUEUE_HANDLE LockHandle)
{
    UNIMPLEMENTED;
    return 0;
}

/*
 * @unimplemented
 */
VOID
FASTCALL
KeReleaseInStackQueuedSpinLockForDpc(IN PKLOCK_QUEUE_HANDLE LockHandle)
{
    UNIMPLEMENTED;
}

/*
 * @implemented
 */
BOOLEAN
FASTCALL
KeTestSpinLock(IN PKSPIN_LOCK SpinLock)
{
    /* Test this spinlock */
    if (*SpinLock)
    {
        /* Spinlock is busy, yield execution */
        YieldProcessor();

        /* Return busy flag */
        return FALSE;
    }

    /* Spinlock appears to be free */
    return TRUE;
}
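
/*
 * Usage sketch (not built): KeTestSpinLock allows a cheap read-only pass
 * before attempting the interlocked acquire, i.e. the classic
 * test-and-test-and-set loop that avoids hammering the cache line while
 * the lock is held.
 */
#if 0
VOID
ExampleSpinUntilAcquired(IN PKSPIN_LOCK SpinLock)
{
    /* Caller is already at DISPATCH_LEVEL */
    for (;;)
    {
        /* Only attempt the atomic operation when the lock looks free */
        if (KeTestSpinLock(SpinLock) &&
            KeTryToAcquireSpinLockAtDpcLevel(SpinLock))
        {
            break;
        }
    }

    /* ... protected work, then KeReleaseSpinLockFromDpcLevel(SpinLock) ... */
}
#endif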

#ifdef _M_IX86
VOID
NTAPI
Kii386SpinOnSpinLock(PKSPIN_LOCK SpinLock, ULONG Flags)
{
    // FIXME: Handle flags
    UNREFERENCED_PARAMETER(Flags);

    /* Spin until it's unlocked */
    while (*(volatile KSPIN_LOCK *)SpinLock & 1)
    {
        // FIXME: Check for timeout

        /* Yield and keep looping */
        YieldProcessor();
    }
}
#endif