- Rewrite Executive Interlocked Functions to actually work as supposed and also immen...
[reactos.git] / reactos / ntoskrnl / ex / i386 / fastinterlck_asm.S
1 /*
2 * COPYRIGHT: See COPYING in the top level directory
3 * PROJECT: ReactOS kernel
4 * FILE: ntoskrnl/ex/i386/fastinterlck_asm.S
5 * PURPOSE: FASTCALL Interlocked Functions
6 * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)
7 */
8
9 /* INCLUDES ******************************************************************/
10 #include <ndk/asm.h>
11 .intel_syntax noprefix
12
13 //#define CONFIG_SMP
14 
15 #ifdef CONFIG_SMP
/* On MP builds LOCK expands to the x86 bus-lock prefix; it is empty on UP
 * because a single processor cannot race against itself within one
 * instruction. */
16 #define LOCK lock
/* Atomically try to set bit 0 of the KSPIN_LOCK at [x]; if it was already
 * owned (bts sets CF from the old bit), branch to the out-of-line spin
 * label y so the uncontended path falls straight through. */
17 #define ACQUIRE_SPINLOCK(x, y) \
18 lock bts dword ptr [x], 0; \
19 jb y
/* Release needs only a plain store: x86 stores are not reordered with
 * older stores, and the lock owner is the sole writer of the lock byte. */
20 #define RELEASE_SPINLOCK(x) mov byte ptr [x], 0
/* Busy-wait (with pause, to be SMT/power friendly) until the lock byte
 * reads free, then jump back to y to retry the atomic acquire. */
21 #define SPIN_ON_LOCK(x, y) \
22 1: \
23 test dword ptr [x], 1; \
24 jz y; \
25 pause; \
26 jmp 1b
27 #else
/* On UP the interlocked paths rely on cli alone, so the lock macros
 * collapse to nothing (SPIN_ON_LOCK is only referenced inside SMP code). */
28 #define LOCK
29 #define ACQUIRE_SPINLOCK(x, y)
30 #define RELEASE_SPINLOCK(x)
31 #endif
32
33 /* FUNCTIONS ****************************************************************/
34
35 /*
36 * NOTE: These functions must obey the following rules:
37 * - Acquire locks only on MP systems.
38 * - Be safe at HIGH_LEVEL (no paged access).
39 * - Preserve flags.
40 * - Disable interrupts.
41 */
42
43 /*VOID
44 *FASTCALL
45 *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,
46 * IN ULONG Increment)
47 */
/*
 * ecx = Addend (64-bit statistic), edx = Increment (32-bit).
 * Adds Increment to the statistic.  No return value, no spinlock.
 */
48 .global @ExInterlockedAddLargeStatistic@8
49 @ExInterlockedAddLargeStatistic@8:
50 
51 #ifdef CONFIG_SMP
52 /* Do the addition */
53 lock add [ecx], edx
54 
55 /* Check for carry bit and return */
/* jb == jc: CF here comes straight from the locked add above. */
56 jb 1f
57 ret
58 
59 1:
60 /* Add carry */
/* jb does not modify flags, so CF is still set here and adc ...,0 folds
 * it into the high dword.  NOTE(review): low and high halves are two
 * separate locked ops, so a concurrent 64-bit reader can momentarily see
 * a torn value - acceptable for statistics; verify no caller needs more. */
61 lock adc dword ptr [ecx+4], 0
62 #else
63 /* Do the addition and add the carry */
64 add dword ptr [ecx], edx
65 adc dword ptr [ecx+4], 0
66 #endif
67 /* Return */
68 ret
69
70 /*ULONG
71 *FASTCALL
72 *ExfInterlockedAddUlong(IN PULONG Addend,
73 * IN ULONG Increment,
74 * IN PKSPIN_LOCK Lock)
75 */
/*
 * ecx = Addend, edx = Increment, [esp+4] = Lock (third fastcall arg).
 * Returns the ORIGINAL value of *Addend in eax.  The add runs with
 * interrupts disabled under the spinlock; EFLAGS (and thus IF) is
 * restored before returning.
 */
76 .global @ExfInterlockedAddUlong@12
77 @ExfInterlockedAddUlong@12:
78 
79 /* Save flags */
80 pushfd
81 
82 #ifdef CONFIG_SMP
83 /* Get lock address */
/* After pushfd the stack argument has shifted: [esp+8] = Lock. */
84 mov eax, [esp+8]
85 .start1:
86 #endif
87 /* Disable interrupts */
88 cli
89 
90 /* Acquire lock */
91 ACQUIRE_SPINLOCK(eax, .spin1)
92 
93 /* Do the add */
/* eax now changes role from lock pointer to return value. */
94 mov eax, [ecx]
95 add [ecx], edx
96 
97 #ifdef CONFIG_SMP
98 /* Get spinlock address and release it */
/* edx (Increment) is dead after the add; reuse it for the lock pointer. */
99 mov edx, [esp+8]
100 RELEASE_SPINLOCK(edx)
101 #endif
102 
103 /* Restore flags and return */
104 popfd
/* Fastcall: callee pops the single dword stack argument. */
105 ret 4
106 
107 #ifdef CONFIG_SMP
108 .spin1:
109 /* Restore flags and spin */
/* popfd restores the caller's IF so we never busy-wait with interrupts
 * off; pushfd re-saves it, then we spin until the lock looks free and
 * retry from .start1 (cli + atomic acquire). */
110 popfd
111 pushfd
112 SPIN_ON_LOCK(eax, .start1)
113 #endif
114
115 /*PLIST_ENTRY
116 *FASTCALL
117 *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
118 * IN PLIST_ENTRY ListEntry,
119 * IN PKSPIN_LOCK Lock)
120 */
/*
 * ecx = ListHead, edx = ListEntry, [esp+4] = Lock.
 * Inserts ListEntry at the head and returns the PREVIOUS first entry,
 * or NULL if the list was empty.
 */
121 .global @ExfInterlockedInsertHeadList@12
122 @ExfInterlockedInsertHeadList@12:
123 
124 #ifdef CONFIG_SMP
125 /* Save lock address */
/* esi is callee-saved; after the push, [esp+8] = Lock. */
126 push esi
127 mov esi, [esp+8]
128 #endif
129 
130 /* Save flags and disable interrupts */
131 pushfd
132 .start2:
133 cli
134 
135 /* Acquire lock */
136 ACQUIRE_SPINLOCK(esi, .spin2)
137 
138 /* Get list pointer */
/* eax = old Flink (first entry; equals the head itself when empty). */
139 mov eax, [ecx]
140 
141 /* Do the insert */
/* Entry->Flink = old first; Entry->Blink = head;
 * Head->Flink = entry; old first->Blink = entry. */
142 mov [edx], eax
143 mov [edx+4], ecx
144 mov [ecx], edx
145 mov [eax+4], edx
146 
147 /* Release lock and restore flags */
148 RELEASE_SPINLOCK(esi)
149 popfd
150 
151 #ifdef CONFIG_SMP
152 pop esi
153 #endif
154 
155 /* Check if list was empty */
/* xor trick: if eax == ecx the list was empty and eax becomes 0 (the
 * NULL return); otherwise the second xor undoes the first. */
156 xor eax, ecx
157 jz 2f
158 
159 /* Return list pointer */
160 xor eax, ecx
161 2:
162 ret 4
163 
164 #ifdef CONFIG_SMP
165 .spin2:
/* Restore the caller's IF while spinning, re-save it, then retry. */
166 popfd
167 pushfd
168 SPIN_ON_LOCK(esi, .start2)
169 #endif
170
171 /*PLIST_ENTRY
172 *NTAPI
173 *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
174 * IN PLIST_ENTRY ListEntry,
175 * IN PKSPIN_LOCK Lock)
176 */
/*
 * ecx = ListHead, edx = ListEntry, [esp+4] = Lock.
 * Inserts ListEntry at the tail and returns the PREVIOUS last entry,
 * or NULL if the list was empty.
 */
177 .global @ExfInterlockedInsertTailList@12
178 @ExfInterlockedInsertTailList@12:
179 
180 #ifdef CONFIG_SMP
181 /* Save lock address */
/* esi is callee-saved; after the push, [esp+8] = Lock. */
182 push esi
183 mov esi, [esp+8]
184 #endif
185 
186 /* Save flags and disable interrupts */
187 pushfd
188 .start3:
189 cli
190 
191 /* Acquire lock */
192 ACQUIRE_SPINLOCK(esi, .spin3)
193 
194 /* Get list pointer */
/* eax = old Blink (last entry; equals the head itself when empty). */
195 mov eax, [ecx+4]
196 
197 /* Do the insert */
/* Entry->Flink = head; Entry->Blink = old last;
 * Head->Blink = entry; old last->Flink = entry. */
198 mov [edx], ecx
199 mov [edx+4], eax
200 mov [ecx+4], edx
201 mov [eax], edx
202 
203 /* Release lock and restore flags */
204 RELEASE_SPINLOCK(esi)
205 popfd
206 
207 #ifdef CONFIG_SMP
208 pop esi
209 #endif
210 
211 /* Check if list was empty */
/* xor trick: NULL if the old last entry was the head itself. */
212 xor eax, ecx
213 jz 2f
214 
215 /* Return list pointer */
216 xor eax, ecx
217 2:
218 ret 4
219 
220 #ifdef CONFIG_SMP
221 .spin3:
/* Restore the caller's IF while spinning, re-save it, then retry. */
222 popfd
223 pushfd
224 SPIN_ON_LOCK(esi, .start3)
225 #endif
226
227 /*PLIST_ENTRY
228 *FASTCALL
229 *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,
230 * IN PKSPIN_LOCK Lock)
231 */
/*
 * ecx = ListHead, edx = Lock (only 8 bytes of args, both in registers).
 * Removes and returns the first entry, or NULL if the list is empty.
 */
232 .global @ExfInterlockedRemoveHeadList@8
233 @ExfInterlockedRemoveHeadList@8:
234 
235 /* Save flags and disable interrupts */
236 .start4:
237 pushfd
238 cli
239 ACQUIRE_SPINLOCK(edx, .spin4)
240 
241 /* Get list pointer */
242 mov eax, [ecx]
243 
244 /* Check if it's empty */
/* An empty LIST_ENTRY points at itself. */
245 cmp eax, ecx
246 je 2f
247 
248 /* Get the next entry and do the deletion*/
249 #ifdef CONFIG_SMP
/* On MP, edx still addresses the spinlock and must survive, so borrow
 * callee-saved ebx as the scratch register instead. */
250 push ebx
251 mov ebx, [eax]
252 mov [ecx], ebx
253 mov [ebx+4], ecx
254 #else
/* On UP there is no lock to release, so edx is free to clobber. */
255 mov edx, [eax]
256 mov [ecx], edx
257 mov [edx+4], ecx
258 #endif
259 
260 /* Release lock */
261 #ifdef CONFIG_SMP
262 RELEASE_SPINLOCK(edx)
263 pop ebx
264 #endif
265 
266 /* Restore flags */
267 popfd
268 
269 /* Return */
270 ret
271 
272 2:
273 /* Release lock */
274 RELEASE_SPINLOCK(edx)
275 
276 /* Restore flags */
277 popfd
278 
279 /* Return empty list */
280 xor eax, eax
281 ret
282 
283 #ifdef CONFIG_SMP
284 .spin4:
/* Restore the caller's IF while spinning; .start4 re-saves flags. */
285 popfd
286 SPIN_ON_LOCK(edx, .start4)
287 #endif
288
289 /*PSINGLE_LIST_ENTRY
290 *FASTCALL
291 *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
292 * IN PKSPIN_LOCK Lock)
293 */
/*
 * ecx = ListHead (singly-linked list), edx = Lock.
 * Pops and returns the first entry, or NULL if the list is empty.
 */
294 .global @ExfInterlockedPopEntryList@8
295 @ExfInterlockedPopEntryList@8:
296 
297 /* Save flags and disable interrupts */
298 .start5:
299 pushfd
300 cli
301 ACQUIRE_SPINLOCK(edx, .spin5)
302 
303 /* Get list pointer */
304 mov eax, [ecx]
305 
306 /* Check if it's empty */
/* A singly-linked head holds NULL when the list is empty. */
307 or eax, eax
308 je 3f
309 
310 /* Get next entry and do deletion */
311 #ifdef CONFIG_SMP
/* Preserve the lock pointer around the scratch use of edx below. */
312 push edx
313 #endif
314 mov edx, [eax]
315 mov [ecx], edx
316 #ifdef CONFIG_SMP
317 pop edx
318 #endif
319 
320 2:
321 /* Release lock */
322 RELEASE_SPINLOCK(edx)
323 
324 /* Restore flags */
325 popfd
326 
327 /* Return */
328 ret
329 
330 3:
331 /* Return empty list */
/* eax is already zero on this path; the xor keeps the exit explicit. */
332 xor eax, eax
333 jmp 2b
334 
335 #ifdef CONFIG_SMP
336 .spin5:
/* Restore the caller's IF while spinning; .start5 re-saves flags. */
337 popfd
338 SPIN_ON_LOCK(edx, .start5)
339 #endif
340
341 /*PSINGLE_LIST_ENTRY
342 *NTAPI
343 *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
344 * IN PSINGLE_LIST_ENTRY ListEntry,
345 * IN PKSPIN_LOCK Lock)
346 */
/*
 * ecx = ListHead, edx = ListEntry, [esp+4] = Lock.
 * Pushes ListEntry and returns the PREVIOUS first entry (NULL if none).
 */
347 .global @ExfInterlockedPushEntryList@12
348 @ExfInterlockedPushEntryList@12:
349 
350 /* Save flags */
351 pushfd
352 
353 /* Save lock pointer */
354 #ifdef CONFIG_SMP
/* Park ListEntry on the stack so edx can address the lock.  Layout now:
 * [esp] = ListEntry, [esp+4] = flags, [esp+8] = ret, [esp+12] = Lock. */
355 push edx
356 mov edx, [esp+12]
357 #endif
358 
359 /* Disable interrupts */
360 .start6:
361 cli
362 #ifdef CONFIG_SMP
363 ACQUIRE_SPINLOCK(edx, .spin6)
/* Lock held: recover ListEntry into edx. */
364 pop edx
365 #endif
366 
367 /* Get list pointer */
/* eax = old first entry; it is also the return value. */
368 mov eax, [ecx]
369 
370 /* Do push */
371 mov [edx], eax
372 mov [ecx], edx
373 
374 /* Release lock */
375 #ifdef CONFIG_SMP
/* After the pop above the Lock argument is back at [esp+8]. */
376 mov edx, [esp+8]
377 RELEASE_SPINLOCK(edx)
378 #endif
379 
380 /* Restore flags */
381 popfd
382 
383 /* Return */
384 ret 4
385 
386 #ifdef CONFIG_SMP
387 .spin6:
/* Unwind the frame to restore the caller's IF while spinning, rebuild
 * it, reload the lock pointer, and retry from .start6 once it is free. */
388 pop edx
389 popfd
390 pushfd
391 push edx
392 mov edx, [esp+12]
393 SPIN_ON_LOCK(edx, .start6)
394 #endif
395
396 /*PSINGLE_LIST_ENTRY
397 *NTAPI
398 *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
399 * IN PKSPIN_LOCK Lock)
400 */
/*
 * ecx = SLIST header (Next pointer at [ecx], Depth/Sequence dword at
 * [ecx+4]).  The Ex variant's Lock argument (edx) is unused: the pop is
 * lock-free via cmpxchg8b on the (Next, Depth/Sequence) pair.
 * Returns the popped entry, or NULL if the list is empty.
 */
401 .global @ExInterlockedPopEntrySList@8
402 .global @InterlockedPopEntrySList@4
403 @ExInterlockedPopEntrySList@8:
404 @InterlockedPopEntrySList@4:
405 
406 /* Save registers */
407 push ebx
408 push ebp
409 
410 /* Pointer to list */
411 mov ebp, ecx
412 
413 /* Get sequence number and link pointer */
/* cmpxchg8b compares edx:eax against the 64-bit header. */
414 mov edx, [ebp+4]
415 mov eax, [ebp]
416 
417 1:
418 /* Check if the list is empty */
419 or eax, eax
420 jz 2f
421 
422 /* Copy sequence number and adjust it */
/* New header dword: Depth (low word) decremented. */
423 lea ecx, [edx-1]
424 
425 /* Get next pointer and do the exchange */
/* NOTE(review): this dereferences the candidate entry before owning it;
 * if another CPU pops and frees it first, the load can touch freed
 * memory.  NT tolerates this with fault handling - verify how ReactOS
 * guards this path. */
426 mov ebx, [eax]
/* On failure ZF=0 and cmpxchg8b reloads edx:eax with the current header,
 * so the loop retries on a fresh snapshot. */
427 LOCK cmpxchg8b qword ptr [ebp]
428 jnz 1b
429 
430 /* Restore registers and return */
431 2:
432 pop ebp
433 pop ebx
434 ret
435
436 /*PSINGLE_LIST_ENTRY
437 *NTAPI
438 *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
439 * IN PSINGLE_LIST_ENTRY ListEntry,
440 * IN PKSPIN_LOCK Lock)
441 */
/*
 * ecx = SLIST header, edx = ListEntry; the Ex variant's stack Lock
 * argument is unused (push is lock-free via cmpxchg8b).
 * Returns the PREVIOUS first entry in eax: on success cmpxchg8b leaves
 * the old header value in edx:eax.
 */
442 .global @ExInterlockedPushEntrySList@12
443 @ExInterlockedPushEntrySList@12:
444 
445 /* So we can fall through below */
/* Pop the return address over the unused Lock argument, turning the @12
 * stack frame into the @8 frame the shared body below expects. */
446 pop [esp]
447 
448 .global @InterlockedPushEntrySList@8
449 @InterlockedPushEntrySList@8:
450 
451 /* Save registers */
452 push ebx
453 push ebp
454 
455 /* Pointer to list */
456 mov ebp, ecx
457 mov ebx, edx
458 
459 /* Get sequence number and link pointer */
460 mov edx, [ebp+4]
461 mov eax, [ebp]
462 
463 1:
464 /* Set link pointer */
/* Entry->Next = current first entry. */
465 mov [ebx], eax
466 
467 /* Copy sequence number and adjust it */
/* Single lea bumps Depth (low word) and Sequence (bit 16 up) by 1 each. */
468 lea ecx, [edx+0x10001]
469 
470 /* Do the exchange */
/* On failure edx:eax is refreshed with the current header; retry. */
471 LOCK cmpxchg8b qword ptr [ebp]
472 jnz 1b
473 
474 /* Restore registers and return */
475 2:
476 pop ebp
477 pop ebx
478 ret
479
480 /*PSINGLE_LIST_ENTRY
481 *NTAPI
482 *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)
483 */
/*
 * ecx = SLIST header.  Atomically detaches the entire chain: Next
 * becomes NULL and Depth (low word of [ecx+4]) becomes 0, while the
 * Sequence bits are preserved.  Returns the old first entry (head of the
 * detached chain), or NULL if the list was already empty.
 */
484 .global @ExInterlockedFlushSList@4
485 @ExInterlockedFlushSList@4:
486 
487 /* Save registers */
488 push ebx
489 push ebp
490 
491 /* Clear ebx */
/* ebx = 0 serves as both the new Next pointer and the source for
 * clearing the Depth word below. */
492 xor ebx, ebx
493 
494 /* Pointer to list */
495 mov ebp, ecx
496 
497 /* Get sequence number and link pointer */
498 mov edx, [ebp+4]
499 mov eax, [ebp]
500 
501 1:
502 /* Check if the list is empty */
503 or eax, eax
504 jz 2f
505 
506 /* Clear sequence and pointer */
/* ecx = old dword with Depth (low word) zeroed; Sequence kept. */
507 mov ecx, edx
508 mov cx, bx
509 
510 /* Do the exchange */
/* On failure edx:eax is refreshed with the current header; retry. */
511 LOCK cmpxchg8b qword ptr [ebp]
512 jnz 1b
513 
514 /* Restore registers and return */
515 2:
516 pop ebp
517 pop ebx
518 ret
519
520 /*INTERLOCKED_RESULT
521 *FASTCALL
522 *Exfi386InterlockedIncrementLong(IN PLONG Addend)
523 */
/*
 * ecx = Addend.  Legacy API: returns a sign/zero classification of the
 * result (INTERLOCKED_RESULT), not the new value.  lahf copies the
 * arithmetic flags into AH; EFLAG_SELECT (from ndk/asm.h) masks the
 * relevant bits.  NOTE(review): assumes EFLAG_SELECT covers only AH-range
 * bits so stale AL contents are masked off - confirm against ndk/asm.h.
 */
524 .global @Exfi386InterlockedIncrementLong@4
525 @Exfi386InterlockedIncrementLong@4:
526 
527 /* Do the op */
528 LOCK add dword ptr [ecx], 1
529 
530 /* Return */
531 lahf
532 and eax, EFLAG_SELECT
533 ret
534
535 /*INTERLOCKED_RESULT
536 *FASTCALL
537 *Exfi386InterlockedDecrementLong(IN PLONG Addend)
538 */
/*
 * ecx = Addend.  Legacy API: returns a sign/zero classification of the
 * decremented result (INTERLOCKED_RESULT), not the new value.  lahf
 * copies the arithmetic flags into AH; EFLAG_SELECT (from ndk/asm.h)
 * masks the relevant bits.  NOTE(review): assumes EFLAG_SELECT covers
 * only AH-range bits - confirm against ndk/asm.h.
 */
539 .global @Exfi386InterlockedDecrementLong@4
540 @Exfi386InterlockedDecrementLong@4:
541 
542 /* Do the op */
543 LOCK sub dword ptr [ecx], 1
544 
545 /* Return */
546 lahf
547 and eax, EFLAG_SELECT
548 ret
549
550 /*ULONG
551 *FASTCALL
552 *Exfi386InterlockedExchangeUlong(IN PULONG Target,
553 * IN ULONG Value)
554 */
/*
 * ecx = Target, edx = Value.  Stores Value and returns the ORIGINAL
 * value of *Target in eax.  Also exported as @InterlockedExchange@8.
 */
555 .global @Exfi386InterlockedExchangeUlong@8
556 .global @InterlockedExchange@8
557 @InterlockedExchange@8:
558 @Exfi386InterlockedExchangeUlong@8:
559 
560 #ifdef CONFIG_SMP
561 /* On MP, do the exchange */
/* xchg with a memory operand asserts the LOCK signal implicitly and
 * leaves the old value in edx. */
562 xchg [ecx], edx
563 mov eax, edx
564 #else
565 /* On UP, use cmpxchg */
/* Avoids xchg's implicit bus lock; on failure cmpxchg reloads eax with
 * the current value, so the loop retries until the store lands. */
566 mov eax, [ecx]
567 1:
568 cmpxchg [ecx], edx
569 jnz 1b
570 #endif
571 
572 /* Return */
573 ret
574
575 /*ULONG
576 *FASTCALL
577 *InterlockedIncrement(IN PLONG Addend)
578 */
/*
 * ecx = Addend.  Returns the NEW value: xadd leaves the old value in
 * eax, and the inc turns it into old + 1.
 */
579 .global @InterlockedIncrement@4
580 @InterlockedIncrement@4:
581 
582 /* Do the op */
583 mov eax, 1
584 LOCK xadd dword ptr [ecx], eax
585 
586 /* Return */
587 inc eax
588 ret
589
590 /*ULONG
591 *FASTCALL
592 *InterlockedDecrement(IN PLONG Addend)
593 */
/*
 * ecx = Addend.  Returns the NEW value: xadd adds -1 and leaves the old
 * value in eax, and the dec turns it into old - 1.
 */
594 .global @InterlockedDecrement@4
595 @InterlockedDecrement@4:
596 
597 /* Do the op */
598 mov eax, -1
599 LOCK xadd dword ptr [ecx], eax
600 
601 /* Return */
602 dec eax
603 ret
604
605 /*PVOID
606 *FASTCALL
607 *InterlockedCompareExchange(IN OUT PVOID *Destination,
608 * IN PVOID Exchange,
609 * IN PVOID Comperand)
610 */
/*
 * ecx = Destination, edx = Exchange, [esp+4] = Comperand.
 * Returns the initial value of *Destination in eax.
 */
611 .global @InterlockedCompareExchange@12
612 @InterlockedCompareExchange@12:
613 
614 /* Get comperand */
615 mov eax, [esp+4]
616 
617 /* Do the op */
618 LOCK cmpxchg dword ptr [ecx], edx
619 
620 /* Return */
/* FIX: fastcall @12 carries one dword argument on the stack, which the
 * callee must pop.  A plain ret left the caller's stack unbalanced
 * (compare @ExfInterlockedCompareExchange64@12 below, which uses ret 4
 * for the same one-stack-arg shape). */
621 ret 4
622
623 /*LONGLONG
624 *FASTCALL
625 *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,
626 * IN PLONGLONG Exchange,
627 * IN PLONGLONG Comperand)
628 */
/*
 * ecx = Destination, edx = pointer to the Exchange value,
 * [esp+4] = pointer to the Comperand value.
 * Returns the initial 64-bit value of *Destination in edx:eax.
 */
629 .global @ExfInterlockedCompareExchange64@12
630 @ExfInterlockedCompareExchange64@12:
631 
632 /* Save registers */
/* FIX: the epilogue pops ebp then ebx, and ebx is clobbered below, so
 * the first push must save ebx.  The original pushed ebp twice, which
 * destroyed the caller's callee-saved ebx (it received ebp's value). */
633 push ebx
634 push ebp
635 
636 /* Get destination pointer, exchange value and comperand value */
/* After the two pushes: [esp+8] = return address, [esp+12] = Comperand. */
637 mov ebp, ecx
638 mov ebx, [edx]
639 mov ecx, [edx+4]
640 mov edx, [esp+12]
641 mov eax, [edx]
642 mov edx, [edx+4]
643 
644 /* Do the op */
/* Compares edx:eax with *Destination; on match stores ecx:ebx.  Either
 * way edx:eax ends up holding the original value, which is the return. */
645 LOCK cmpxchg8b qword ptr [ebp]
646 
647 /* Restore registers */
648 pop ebp
649 pop ebx
650 
651 /* Return */
/* Pop the single stack argument (Comperand pointer). */
652 ret 4
653
654 /*LONGLONG
655 *FASTCALL
656 *ExInterlockedCompareExchange64(IN PLONGLONG Destination,
657 * IN PLONGLONG Exchange,
658 * IN PLONGLONG Comperand,
659 * IN PKSPIN_LOCK Lock)
660 */
/*
 * ecx = Destination, edx = pointer to the Exchange value,
 * [esp+4] = pointer to the Comperand value, [esp+8] = Lock (ignored:
 * cmpxchg8b is already atomic, so no spinlock is needed).
 * Returns the initial 64-bit value of *Destination in edx:eax.
 */
661 .global @ExInterlockedCompareExchange64@16
662 @ExInterlockedCompareExchange64@16:
663 
664 /* Save registers */
/* FIX: the epilogue pops ebp then ebx, and ebx is clobbered below, so
 * the first push must save ebx.  The original pushed ebp twice, which
 * destroyed the caller's callee-saved ebx (it received ebp's value). */
665 push ebx
666 push ebp
667 
668 /* Get destination pointer, exchange value and comperand value */
/* After the two pushes: [esp+8] = return address, [esp+12] = Comperand. */
669 mov ebp, ecx
670 mov ebx, [edx]
671 mov ecx, [edx+4]
672 mov edx, [esp+12]
673 mov eax, [edx]
674 mov edx, [edx+4]
675 
676 /* Do the op */
/* Compares edx:eax with *Destination; on match stores ecx:ebx.  Either
 * way edx:eax ends up holding the original value, which is the return. */
677 LOCK cmpxchg8b qword ptr [ebp]
678 
679 /* Restore registers */
680 pop ebp
681 pop ebx
682 
683 /* Return */
/* Pop both stack arguments (Comperand pointer and the unused Lock). */
684 ret 8
685
686 /*LONG
687 *FASTCALL
688 *InterlockedExchangeAdd(IN OUT PLONG Addend,
689 * IN LONG Increment)
690 */
/*
 * ecx = Addend, edx = Increment.  Returns the ORIGINAL value of *Addend:
 * xadd writes the pre-add value back into edx.
 */
691 .global @InterlockedExchangeAdd@8
692 @InterlockedExchangeAdd@8:
693 
694 /* Do the op */
695 LOCK xadd dword ptr [ecx], edx
696 
697 /* Return */
698 mov eax, edx
699 ret
700 
701 /* EOF */