[ASM]
[reactos.git] / reactos / ntoskrnl / ex / i386 / fastinterlck_asm.S
1 /*
2 * COPYRIGHT: See COPYING in the top level directory
3 * PROJECT: ReactOS kernel
4 * FILE: ntoskrnl/ex/i386/fastinterlck_asm.S
5 * PURPOSE: FASTCALL Interlocked Functions
6 * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)
7 */
8
9 /* INCLUDES ******************************************************************/
10
11 #include <asm.inc>
12 #include <ks386.inc>
13 #include <internal/i386/asmmacro.S>
14
15 /* FUNCTIONS ****************************************************************/
16
17 .code32
18
19 /*
20 * NOTE: These functions must obey the following rules:
21 * - Acquire locks only on MP systems.
22 * - Be safe at HIGH_LEVEL (no paged access).
23 * - Preserve flags.
 * - Disable interrupts.
25 */
26
/*VOID
 *FASTCALL
 *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,
 *                               IN ULONG Increment)
 *
 * Adds a 32-bit Increment (edx) to the 64-bit statistic at Addend (ecx)
 * without a spinlock; each dword update is made atomic by the bus lock.
 */
PUBLIC @ExInterlockedAddLargeStatistic@8
@ExInterlockedAddLargeStatistic@8:

#ifdef CONFIG_SMP
    /* Atomically add the increment to the low dword */
    lock add [ecx], edx

    /* Fast path: no carry out of the low dword, nothing else to do */
    jb .l1
    ret

.l1:
    /* CF is still set from the add above, so "adc ..., 0" adds exactly 1.
       NOTE(review): the two locked ops are not one atomic 64-bit update;
       a concurrent reader can see the low dword before the carry lands --
       presumably acceptable for statistics, confirm against callers. */
    lock adc dword ptr [ecx+4], 0
#else
    /* UP: plain add/adc pair, no other CPU can interleave */
    add dword ptr [ecx], edx
    adc dword ptr [ecx+4], 0
#endif
    /* Return */
    ret
53
/*ULONG
 *FASTCALL
 *ExfInterlockedAddUlong(IN PULONG Addend,
 *                       IN ULONG Increment,
 *                       IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = Addend, edx = Increment, [esp+4] = Lock (popped by ret 4)
 * Out: eax = previous value of *Addend
 * The read-modify-write runs under the caller's spinlock (MP only)
 * with interrupts disabled; the caller's flags are preserved.
 */
PUBLIC @ExfInterlockedAddUlong@12
@ExfInterlockedAddUlong@12:

    /* Save caller's flags (restored by popfd below) */
    pushfd

#ifdef CONFIG_SMP
    /* eax = lock address; [esp+8] now that the flags are pushed */
    mov eax, [esp+8]
.start1:
#endif
    /* Disable interrupts */
    cli

    /* Acquire lock (expands to nothing on UP builds) */
    ACQUIRE_SPINLOCK(eax, .spin1)

    /* eax = old value, then do the add */
    mov eax, [ecx]
    add [ecx], edx

#ifdef CONFIG_SMP
    /* Reload the lock address (eax now holds the return value), release */
    mov edx, [esp+8]
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags and return, popping the Lock argument */
    popfd
    ret 4

#ifdef CONFIG_SMP
.spin1:
    /* Restore flags (re-enables interrupts) while spinning, then retry */
    popfd
    pushfd
    SPIN_ON_LOCK(eax, .start1)
#endif
98
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
 *                             IN PLIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = ListHead, edx = ListEntry, [esp+4] = Lock (popped by ret 4)
 * Out: eax = previous first entry, or NULL if the list was empty
 * Inserts ListEntry at the head under the spinlock with interrupts off.
 */
PUBLIC @ExfInterlockedInsertHeadList@12
@ExfInterlockedInsertHeadList@12:

#ifdef CONFIG_SMP
    /* Save esi (callee-saved) and keep the lock address in it;
       after the push, the Lock argument sits at [esp+8] */
    push esi
    mov esi, [esp+8]
#endif

    /* Save flags and disable interrupts */
    pushfd
.start2:
    cli

    /* Acquire lock (no-op on UP) */
    ACQUIRE_SPINLOCK(esi, .spin2)

    /* eax = old first entry (ListHead->Flink) */
    mov eax, [ecx]

    /* Link ListEntry between the head and the old first entry */
    mov [edx], eax
    mov [edx+4], ecx
    mov [ecx], edx
    mov [eax+4], edx

    /* Release lock and restore flags */
    RELEASE_SPINLOCK(esi)
    popfd

#ifdef CONFIG_SMP
    pop esi
#endif

    /* Empty list had Flink == head: eax ^ ecx leaves NULL in eax
       in that case, otherwise the second xor restores the pointer */
    xor eax, ecx
    jz .l2

    /* Return the old first entry */
    xor eax, ecx
.l2:
    ret 4

#ifdef CONFIG_SMP
.spin2:
    /* Restore flags (re-enables interrupts) while spinning */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .start2)
#endif
154
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
 *                             IN PLIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = ListHead, edx = ListEntry, [esp+4] = Lock (popped by ret 4)
 * Out: eax = previous last entry, or NULL if the list was empty
 * Inserts ListEntry at the tail under the spinlock with interrupts off.
 */
PUBLIC @ExfInterlockedInsertTailList@12
@ExfInterlockedInsertTailList@12:

#ifdef CONFIG_SMP
    /* Save esi (callee-saved) and keep the lock address in it;
       after the push, the Lock argument sits at [esp+8] */
    push esi
    mov esi, [esp+8]
#endif

    /* Save flags and disable interrupts */
    pushfd
.start3:
    cli

    /* Acquire lock (no-op on UP) */
    ACQUIRE_SPINLOCK(esi, .spin3)

    /* eax = old last entry (ListHead->Blink) */
    mov eax, [ecx+4]

    /* Link ListEntry between the old last entry and the head */
    mov [edx], ecx
    mov [edx+4], eax
    mov [ecx+4], edx
    mov [eax], edx

    /* Release lock and restore flags */
    RELEASE_SPINLOCK(esi)
    popfd

#ifdef CONFIG_SMP
    pop esi
#endif

    /* Empty list had Blink == head: eax ^ ecx leaves NULL in eax
       in that case, otherwise the second xor restores the pointer */
    xor eax, ecx
    jz .l3

    /* Return the old last entry */
    xor eax, ecx
.l3:
    ret 4

#ifdef CONFIG_SMP
.spin3:
    /* Restore flags (re-enables interrupts) while spinning */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .start3)
#endif
210
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,
 *                             IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = ListHead, edx = Lock
 * Out: eax = removed first entry, or NULL if the list was empty
 */
PUBLIC @ExfInterlockedRemoveHeadList@8
@ExfInterlockedRemoveHeadList@8:

    /* Save flags, disable interrupts and take the lock (no-op on UP) */
.start4:
    pushfd
    cli
    ACQUIRE_SPINLOCK(edx, .spin4)

    /* eax = first entry */
    mov eax, [ecx]

    /* An empty list points back at its own head */
    cmp eax, ecx
    je .l4

    /* Unlink the first entry. On MP use ebx as scratch so edx keeps
       the lock address for the release below; on UP edx is free. */
#ifdef CONFIG_SMP
    push ebx
    mov ebx, [eax]
    mov [ecx], ebx
    mov [ebx+4], ecx
#else
    mov edx, [eax]
    mov [ecx], edx
    mov [edx+4], ecx
#endif

    /* Release lock */
#ifdef CONFIG_SMP
    RELEASE_SPINLOCK(edx)
    pop ebx
#endif

    /* Restore flags */
    popfd

    /* Return the removed entry */
    ret

.l4:
    /* Empty list: release lock */
    RELEASE_SPINLOCK(edx)

    /* Restore flags */
    popfd

    /* Return NULL */
    xor eax, eax
    ret

#ifdef CONFIG_SMP
.spin4:
    /* Restore flags and spin until the lock looks free */
    popfd
    SPIN_ON_LOCK(edx, .start4)
#endif
272
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
 *                           IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = ListHead, edx = Lock
 * Out: eax = popped entry, or NULL if the list was empty
 */
PUBLIC @ExfInterlockedPopEntryList@8
@ExfInterlockedPopEntryList@8:

    /* Save flags, disable interrupts and take the lock (no-op on UP) */
.start5:
    pushfd
    cli
    ACQUIRE_SPINLOCK(edx, .spin5)

    /* eax = first entry */
    mov eax, [ecx]

    /* NULL means empty */
    or eax, eax
    je .l6

    /* Unlink: ListHead->Next = entry->Next. On MP preserve edx
       (the lock address) around its use as scratch. */
#ifdef CONFIG_SMP
    push edx
#endif
    mov edx, [eax]
    mov [ecx], edx
#ifdef CONFIG_SMP
    pop edx
#endif

.l5:
    /* Common exit: release lock, restore flags, return eax */
    RELEASE_SPINLOCK(edx)

    /* Restore flags */
    popfd

    /* Return */
    ret

.l6:
    /* Empty list: return NULL via the common exit */
    xor eax, eax
    jmp .l5

#ifdef CONFIG_SMP
.spin5:
    /* Restore flags and spin */
    popfd
    SPIN_ON_LOCK(edx, .start5)
#endif
324
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PSINGLE_LIST_ENTRY ListEntry,
 *                            IN PKSPIN_LOCK Lock)
 *
 * In:  ecx = ListHead, edx = ListEntry, [esp+4] = Lock (popped by ret 4)
 * Out: eax = previous first entry (NULL if the list was empty)
 */
PUBLIC @ExfInterlockedPushEntryList@12
@ExfInterlockedPushEntryList@12:

    /* Save flags */
    pushfd

    /* Save the entry pointer and borrow edx for the lock address;
       after pushfd + push, the Lock argument is at [esp+12] */
#ifdef CONFIG_SMP
    push edx
    mov edx, [esp+12]
#endif

    /* Disable interrupts */
.start6:
    cli
#ifdef CONFIG_SMP
    ACQUIRE_SPINLOCK(edx, .spin6)
    pop edx
#endif

    /* eax = old first entry */
    mov eax, [ecx]

    /* Link the new entry in front */
    mov [edx], eax
    mov [ecx], edx

    /* Reload the lock address ([esp+8] after the flags push), release */
#ifdef CONFIG_SMP
    mov edx, [esp+8]
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags and return, popping the Lock argument */
    popfd
    ret 4

#ifdef CONFIG_SMP
.spin6:
    /* Rebalance the stack, restore flags while spinning, then retry
       with the saved entry pointer and lock address re-established */
    pop edx
    popfd
    pushfd
    push edx
    mov edx, [esp+12]
    SPIN_ON_LOCK(edx, .start6)
#endif
379
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                           IN PKSPIN_LOCK Lock)
 *
 * Lock-free S-list pop using cmpxchg8b; the Lock argument is unused.
 * In:  ecx = ListHead (64-bit header: Next dword + Depth/Sequence dword)
 * Out: eax = popped entry, or NULL if the list was empty
 *
 * The Resume/Fault/End labels are exported so the page-fault handler can
 * recognize a benign fault on the "mov ebx, [eax]" below (the snapshotted
 * entry may already have been popped and freed by another CPU) and restart
 * at the Resume label -- do not rename, reorder or insert code between them.
 */
PUBLIC @ExInterlockedPopEntrySList@8
PUBLIC @InterlockedPopEntrySList@4
PUBLIC _ExpInterlockedPopEntrySListResume@0
PUBLIC _ExpInterlockedPopEntrySListFault@0
PUBLIC _ExpInterlockedPopEntrySListEnd@0
@ExInterlockedPopEntrySList@8:
@InterlockedPopEntrySList@4:

    /* Save callee-saved registers used by cmpxchg8b */
    push ebx
    push ebp

    /* ebp = list header */
    mov ebp, ecx

    /* Snapshot the header: edx = Depth/Sequence dword, eax = Next */
_ExpInterlockedPopEntrySListResume@0:
    mov edx, [ebp+4]
    mov eax, [ebp]

    /* Empty list? */
    or eax, eax
    jz .l7

    /* ecx = sequence dword with the depth decremented */
    lea ecx, [edx-1]

    /* ebx = entry->Next; this load may fault benignly (see above) */
_ExpInterlockedPopEntrySListFault@0:
    mov ebx, [eax]
_ExpInterlockedPopEntrySListEnd@0:
    LOCK cmpxchg8b qword ptr [ebp]
    jnz _ExpInterlockedPopEntrySListResume@0

    /* Restore registers and return the popped entry (or NULL) in eax */
.l7:
    pop ebp
    pop ebx
    ret
424
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PSINGLE_LIST_ENTRY ListEntry,
 *                            IN PKSPIN_LOCK Lock)
 *
 * Lock-free S-list push using cmpxchg8b; the Lock argument is unused.
 * Out: eax = previous first entry (NULL if the list was empty)
 */
PUBLIC @ExInterlockedPushEntrySList@12
@ExInterlockedPushEntrySList@12:

    /* Discard the unused Lock stack argument: move the return address
       up one slot so we can fall through into the @8 version, whose
       plain "ret" then returns correctly */
    pop [esp]

PUBLIC @InterlockedPushEntrySList@8
@InterlockedPushEntrySList@8:

    /* Save callee-saved registers used by cmpxchg8b */
    push ebx
    push ebp

    /* ebp = list header, ebx = entry to push */
    mov ebp, ecx
    mov ebx, edx

    /* Snapshot the header: edx = Depth/Sequence dword, eax = Next */
    mov edx, [ebp+4]
    mov eax, [ebp]

.l8:
    /* entry->Next = old first entry */
    mov [ebx], eax

    /* ecx = sequence dword with Depth (low word) and Sequence
       (high word) both incremented */
    lea ecx, [edx + HEX(10001)]

    /* Publish the new head; on failure cmpxchg8b refreshes edx:eax
       with the current header, so just retry */
    LOCK cmpxchg8b qword ptr [ebp]
    jnz .l8

    /* Restore registers; eax = old first entry */
    pop ebp
    pop ebx
    ret
467
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)
 *
 * Atomically detaches the whole chain: Next = NULL and the Depth word
 * is zeroed while the Sequence word in the high half is preserved.
 * Out: eax = old first entry, or NULL if the list was already empty
 */
PUBLIC @ExInterlockedFlushSList@4
@ExInterlockedFlushSList@4:

    /* Save callee-saved registers used by cmpxchg8b */
    push ebx
    push ebp

    /* New Next value = NULL (bx = 0 also used to clear the depth) */
    xor ebx, ebx

    /* ebp = list header */
    mov ebp, ecx

    /* Snapshot the header: edx = Depth/Sequence dword, eax = Next */
    mov edx, [ebp+4]
    mov eax, [ebp]

.l9:
    /* Nothing to flush? */
    or eax, eax
    jz .l10

    /* ecx = old sequence dword with the Depth (low word) zeroed */
    mov ecx, edx
    mov cx, bx

    /* Swap in the empty header; on failure edx:eax are refreshed,
       so retry with the new snapshot */
    LOCK cmpxchg8b qword ptr [ebp]
    jnz .l9

    /* Restore registers; eax = old first entry (or NULL) */
.l10:
    pop ebp
    pop ebx
    ret
507
/*INTERLOCKED_RESULT
 *FASTCALL
 *Exfi386InterlockedIncrementLong(IN PLONG Addend)
 *
 * Atomically increments *Addend (ecx) and returns the resulting flag
 * state in eax, masked with EFLAG_SELECT (from ks386.inc) so callers
 * can classify the result as negative/zero/positive.
 */
PUBLIC @Exfi386InterlockedIncrementLong@4
@Exfi386InterlockedIncrementLong@4:

    /* add (not inc) so all arithmetic flags reflect the result */
    LOCK add dword ptr [ecx], 1

    /* ah = flags; keep only the selected flag bits as the return value */
    lahf
    and eax, EFLAG_SELECT
    ret
522
/*INTERLOCKED_RESULT
 *FASTCALL
 *Exfi386InterlockedDecrementLong(IN PLONG Addend)
 *
 * Atomically decrements *Addend (ecx) and returns the resulting flag
 * state in eax, masked with EFLAG_SELECT (from ks386.inc) so callers
 * can classify the result as negative/zero/positive.
 */
PUBLIC @Exfi386InterlockedDecrementLong@4
@Exfi386InterlockedDecrementLong@4:

    /* sub (not dec) so all arithmetic flags reflect the result */
    LOCK sub dword ptr [ecx], 1

    /* ah = flags; keep only the selected flag bits as the return value */
    lahf
    and eax, EFLAG_SELECT
    ret
537
538 /*ULONG
539 *FASTCALL
 *Exfi386InterlockedExchangeUlong(IN PULONG Target,
541 * IN ULONG Value)
542 */
PUBLIC @Exfi386InterlockedExchangeUlong@8
@Exfi386InterlockedExchangeUlong@8:

#ifdef CONFIG_SMP
    /* xchg with a memory operand is implicitly locked;
       edx receives the old value */
    xchg [ecx], edx
    mov eax, edx
#else
    /* UP: cmpxchg loop; on failure cmpxchg reloads eax with the
       current memory value, so each retry uses a fresh comparand */
    mov eax, [ecx]
.l11:
    cmpxchg [ecx], edx
    jnz .l11
#endif

    /* Return the old value in eax */
    ret
560
/*PVOID
 *FASTCALL
 *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,
 *                                IN PLONGLONG Exchange,
 *                                IN PLONGLONG Comperand)
 *
 * In:  ecx = Destination, edx = Exchange ptr, [esp+4] = Comperand ptr
 * Out: edx:eax = previous value of *Destination
 * If *Destination == *Comperand, *Exchange is stored atomically.
 */
PUBLIC @ExfInterlockedCompareExchange64@12
@ExfInterlockedCompareExchange64@12:

    /* Save callee-saved registers used by cmpxchg8b */
    push ebx
    push ebp

    /* ebp = Destination, ecx:ebx = Exchange value (hi:lo) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]

    /* edx:eax = Comperand value; its pointer is at [esp+12] after
       the return address and the two saved registers */
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

    /* Atomic 64-bit compare-exchange */
    LOCK cmpxchg8b qword ptr [ebp]

    /* Restore volatiles */
    pop ebp
    pop ebx

    /* Return old value in edx:eax, popping the stack argument */
    ret 4
591
592 /*PVOID
593 *FASTCALL
 *ExInterlockedCompareExchange64(IN PLONGLONG Destination,
595 * IN PLONGLONG Exchange,
596 * IN PLONGLONG Comperand,
597 * IN PKSPIN_LOCK Lock)
598 */
PUBLIC @ExInterlockedCompareExchange64@16
@ExInterlockedCompareExchange64@16:

    /* Save callee-saved registers used by cmpxchg8b.
       BUGFIX: the first push previously saved ebp twice, so the
       epilogue's "pop ebx" restored the caller's ebp value into ebx
       and the caller's ebx was destroyed. */
    push ebx
    push ebp

    /* ebp = Destination, ecx:ebx = Exchange value (hi:lo) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]

    /* edx:eax = Comperand value; its pointer is at [esp+12] after
       the return address and the two saved registers */
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

    /* Atomic 64-bit compare-exchange; the Lock argument at [esp+16]
       is unused since cmpxchg8b is sufficient on its own */
    LOCK cmpxchg8b qword ptr [ebp]

    /* Restore volatiles */
    pop ebp
    pop ebx

    /* Return old value in edx:eax, popping both stack arguments */
    ret 8
623
624 /*** Non-586 functions ***/
625
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PKSPIN_LOCK Lock)
 *
 * Spinlock-based S-list pop for CPUs without cmpxchg8b.
 * In:  ecx = ListHead, edx = Lock
 * Out: eax = popped entry, or NULL if the list was empty
 */
PUBLIC @ExfInterlockedPopEntrySList@8
@ExfInterlockedPopEntrySList@8:

    /* Save flags */
.starta:
    pushfd

    /* Disable interrupts */
    cli

    /* Acquire lock (no-op on UP) */
    ACQUIRE_SPINLOCK(edx, .spina)

    /* eax = first entry; NULL means empty */
    mov eax, [ecx]
    or eax, eax
    jz .l12

    /* ListHead->Next = entry->Next (memory-to-memory via the stack) */
    push [eax]
    pop [ecx]

    /* Decrement the list depth in the second header dword */
    dec dword ptr [ecx+4]

.l12:
#ifdef CONFIG_SMP
    /* Release spinlock */
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags and return (no stack arguments to pop) */
    popfd
    ret 0

#ifdef CONFIG_SMP
.spina:
    /* Restore flags and spin */
    popfd
    SPIN_ON_LOCK(edx, .starta)
#endif
672
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                             IN PSINGLE_LIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * Spinlock-based S-list push for CPUs without cmpxchg8b.
 * In:  ecx = ListHead, edx = ListEntry, [esp+4] = Lock (popped by ret 4)
 */
PUBLIC @ExfInterlockedPushEntrySList@12
@ExfInterlockedPushEntrySList@12:

    /* Save flags */
.startb:
    pushfd

    /* Disable interrupts */
    cli

    /* Acquire lock on MP only.
       BUGFIX: this block was guarded with #ifndef CONFIG_SMP, i.e. the
       spinlock was taken only on UP builds (where the macro is moot)
       and never on SMP -- while the matching release below runs on SMP.
       The guard must be #ifdef, mirroring the release. */
#ifdef CONFIG_SMP
    mov eax, [esp+8]
    ACQUIRE_SPINLOCK(eax, .spinb)
#endif

    /* Link the new entry in front of the current first entry
       (memory-to-memory via the stack) */
    push [ecx]
    pop [edx]
    mov [ecx], edx

    /* Increment the list depth in the second header dword */
    inc dword ptr [ecx+4]

#ifdef CONFIG_SMP
    /* Release spinlock (eax still holds the lock address) */
    RELEASE_SPINLOCK(eax)
#endif

    /* Restore flags and return, popping the Lock argument */
    popfd
    ret 4

#ifdef CONFIG_SMP
.spinb:
    /* Restore flags and spin */
    popfd
    SPIN_ON_LOCK(eax, .startb)
#endif
720
/*PVOID
 *FASTCALL
 *ExpInterlockedCompareExchange64(IN PLONGLONG Destination,
 *                                IN PLONGLONG Exchange,
 *                                IN PLONGLONG Comperand,
 *                                IN PKSPIN_LOCK Lock)
 *
 * Spinlock-based 64-bit compare-exchange for CPUs without cmpxchg8b.
 * In:  ecx = Destination, edx = Exchange ptr,
 *      [esp+4] = Comperand ptr, [esp+8] = Lock (both popped by ret 8)
 * Out: edx:eax = previous value of *Destination
 */
PUBLIC @ExpInterlockedCompareExchange64@16
@ExpInterlockedCompareExchange64@16:

    /* Save callee-saved registers.
       BUGFIX: this previously pushed ebp twice, so the epilogue's
       "pop ebx" restored the caller's ebp value into ebx and the
       caller's ebx was destroyed. */
    push ebx
    push ebp

    /* ebp = Destination, ecx:ebx = Exchange value (hi:lo) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]

    /* edx:eax = Comperand value; its pointer is at [esp+12] after
       the return address and the two saved registers */
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

#ifdef CONFIG_SMP
    /* Save esi so it can hold the spinlock address across the swap */
    push esi

    /* Save flags, load the lock address ([esp+24] after the three
       pushes and pushfd) and disable interrupts */
    pushfd
    mov esi, [esp+24]
.startc:
    cli

    /* Acquire the spinlock */
    ACQUIRE_SPINLOCK(esi, .spinc)
#else
    /* UP: saving flags and disabling interrupts is sufficient */
    pushfd
    cli
#endif

    /* Compare the bottom dword, then the top dword */
    cmp eax, [ebp]
    jne NoMatch

    cmp edx, [ebp+4]
    jne NoMatch

    /* Matched: store the exchange value; edx:eax already hold the
       old value (== the comperand) for the return */
    mov [ebp], ebx
    mov [ebp+4], ecx

AfterSave:
#ifdef CONFIG_SMP
    /* Release lock, restore flags and esi */
    RELEASE_SPINLOCK(esi)
    popfd
    pop esi
#else
    popfd
#endif

    /* Restore the other volatiles */
    pop ebp
    pop ebx

    /* Return old value in edx:eax, popping both stack arguments */
    ret 8

NoMatch:
    /* No match: return the current destination value unchanged */
    mov eax, [ebp]
    mov edx, [ebp+4]
    jmp AfterSave

#ifdef CONFIG_SMP
.spinc:
    /* Restore flags (re-enables interrupts) while spinning */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .startc)
#endif
803
804 END
805 /* EOF */