- Remove svn:needs-lock, svn:eol-type, and svn:eol-style properties.
[reactos.git] / reactos / ntoskrnl / ex / i386 / fastinterlck_asm.S
1 /*
2 * COPYRIGHT: See COPYING in the top level directory
3 * PROJECT: ReactOS kernel
4 * FILE: ntoskrnl/ex/i386/fastinterlck_asm.S
5 * PURPOSE: FASTCALL Interlocked Functions
6 * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)
7 */
8
9 /* INCLUDES ******************************************************************/
10 #include <ndk/asm.h>
11 #include <internal/i386/asmmacro.S>
12 .intel_syntax noprefix
13
14 /* FUNCTIONS ****************************************************************/
15
16 /*
17 * NOTE: These functions must obey the following rules:
18 * - Acquire locks only on MP systems.
19 * - Be safe at HIGH_LEVEL (no paged access).
20 * - Preserve flags.
21 * - Disable interrupts.
22 */
23
24 /*VOID
25 *FASTCALL
26 *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,
27 * IN ULONG Increment)
28 */
29 .global @ExInterlockedAddLargeStatistic@8
30 @ExInterlockedAddLargeStatistic@8:
31
32 #ifdef CONFIG_SMP
33 /* Atomically add the 32-bit Increment (edx) to the low dword at [ecx] */
34 lock add [ecx], edx
35
36 /* If the low dword overflowed, CF is set; otherwise we are done */
37 jb 1f
38 ret
39
40 1:
41 /* Propagate the carry into the high dword (jb leaves CF intact for adc) */
42 lock adc dword ptr [ecx+4], 0
43 #else
44 /* UP: no lock prefix needed; add the low dword, then fold the carry into the high dword */
45 add dword ptr [ecx], edx
46 adc dword ptr [ecx+4], 0
47 #endif
48 /* Return */
49 ret
50
51 /*ULONG
52 *FASTCALL
53 *ExfInterlockedAddUlong(IN PULONG Addend,
54 * IN ULONG Increment,
55 * IN PKSPIN_LOCK Lock)
56 */
57 .global @ExfInterlockedAddUlong@12
58 @ExfInterlockedAddUlong@12:
59
60 /* Save flags */
61 pushfd
62
63 #ifdef CONFIG_SMP
64 /* Load the Lock pointer from the stack ([esp+8] = Lock, above flags and return address) */
65 mov eax, [esp+8]
66 .start1:
67 #endif
68 /* Disable interrupts */
69 cli
70
71 /* Acquire lock */
72 ACQUIRE_SPINLOCK(eax, .spin1)
73
74 /* Fetch the original value (the return value) and apply the increment */
75 mov eax, [ecx]
76 add [ecx], edx
77
78 #ifdef CONFIG_SMP
79 /* eax now holds the return value, so reload the lock pointer into edx to release */
80 mov edx, [esp+8]
81 RELEASE_SPINLOCK(edx)
82 #endif
83
84 /* Restore flags and return */
85 popfd
86 ret 4
87
88 #ifdef CONFIG_SMP
89 .spin1:
90 /* Lock busy: restore the caller's interrupt state while spinning, then retry */
91 popfd
92 pushfd
93 SPIN_ON_LOCK(eax, .start1)
94 #endif
95
96 /*PLIST_ENTRY
97 *FASTCALL
98 *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
99 * IN PLIST_ENTRY ListEntry,
100 * IN PKSPIN_LOCK Lock)
101 */
102 .global @ExfInterlockedInsertHeadList@12
103 @ExfInterlockedInsertHeadList@12:
104
105 #ifdef CONFIG_SMP
106 /* Save esi (callee-saved) and load the Lock pointer from the stack into it */
107 push esi
108 mov esi, [esp+8]
109 #endif
110
111 /* Save flags and disable interrupts */
112 pushfd
113 .start2:
114 cli
115
116 /* Acquire lock */
117 ACQUIRE_SPINLOCK(esi, .spin2)
118
119 /* eax = old first entry (ListHead->Flink) */
120 mov eax, [ecx]
121
122 /* Entry->Flink = old Flink; Entry->Blink = Head; Head->Flink = Entry; old Flink->Blink = Entry */
123 mov [edx], eax
124 mov [edx+4], ecx
125 mov [ecx], edx
126 mov [eax+4], edx
127
128 /* Release lock and restore flags */
129 RELEASE_SPINLOCK(esi)
130 popfd
131
132 #ifdef CONFIG_SMP
133 pop esi
134 #endif
135
136 /* If the old Flink was the head itself the list was empty: eax ^ ecx == 0, return NULL */
137 xor eax, ecx
138 jz 2f
139
140 /* Second xor undoes the first, recovering the old first entry as the return value */
141 xor eax, ecx
142 2:
143 ret 4
144
145 #ifdef CONFIG_SMP
146 .spin2:
147 popfd
148 pushfd
149 SPIN_ON_LOCK(esi, .start2)
150 #endif
151
152 /*PLIST_ENTRY
153 *FASTCALL
154 *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
155 * IN PLIST_ENTRY ListEntry,
156 * IN PKSPIN_LOCK Lock)
157 */
158 .global @ExfInterlockedInsertTailList@12
159 @ExfInterlockedInsertTailList@12:
160
161 #ifdef CONFIG_SMP
162 /* Save esi (callee-saved) and load the Lock pointer from the stack into it */
163 push esi
164 mov esi, [esp+8]
165 #endif
166
167 /* Save flags and disable interrupts */
168 pushfd
169 .start3:
170 cli
171
172 /* Acquire lock */
173 ACQUIRE_SPINLOCK(esi, .spin3)
174
175 /* eax = old last entry (ListHead->Blink) */
176 mov eax, [ecx+4]
177
178 /* Entry->Flink = Head; Entry->Blink = old Blink; Head->Blink = Entry; old Blink->Flink = Entry */
179 mov [edx], ecx
180 mov [edx+4], eax
181 mov [ecx+4], edx
182 mov [eax], edx
183
184 /* Release lock and restore flags */
185 RELEASE_SPINLOCK(esi)
186 popfd
187
188 #ifdef CONFIG_SMP
189 pop esi
190 #endif
191
192 /* If the old Blink was the head itself the list was empty: eax ^ ecx == 0, return NULL */
193 xor eax, ecx
194 jz 2f
195
196 /* Second xor undoes the first, recovering the old last entry as the return value */
197 xor eax, ecx
198 2:
199 ret 4
200
201 #ifdef CONFIG_SMP
202 .spin3:
203 popfd
204 pushfd
205 SPIN_ON_LOCK(esi, .start3)
206 #endif
207
208 /*PLIST_ENTRY
209 *FASTCALL
210 *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,
211 * IN PKSPIN_LOCK Lock)
212 */
213 .global @ExfInterlockedRemoveHeadList@8
214 @ExfInterlockedRemoveHeadList@8:
215
216 /* Save flags and disable interrupts */
217 .start4:
218 pushfd
219 cli
220 ACQUIRE_SPINLOCK(edx, .spin4)
221
222 /* eax = first entry (ListHead->Flink) */
223 mov eax, [ecx]
224
225 /* An empty doubly-linked list points back at its own head */
226 cmp eax, ecx
227 je 2f
228
229 /* Unlink: Head->Flink = Entry->Flink; Entry->Flink->Blink = Head */
230 #ifdef CONFIG_SMP
231 push ebx
232 mov ebx, [eax]
233 mov [ecx], ebx
234 mov [ebx+4], ecx
235 #else
236 mov edx, [eax]
237 mov [ecx], edx
238 mov [edx+4], ecx
239 #endif
240
241 /* Release lock (SMP keeps the lock pointer in edx, so ebx was used as scratch above) */
242 #ifdef CONFIG_SMP
243 RELEASE_SPINLOCK(edx)
244 pop ebx
245 #endif
246
247 /* Restore flags */
248 popfd
249
250 /* Return */
251 ret
252
253 2:
254 /* Release lock */
255 RELEASE_SPINLOCK(edx)
256
257 /* Restore flags */
258 popfd
259
260 /* Return NULL for an empty list */
261 xor eax, eax
262 ret
263
264 #ifdef CONFIG_SMP
265 .spin4:
266 popfd
267 SPIN_ON_LOCK(edx, .start4)
268 #endif
269
270 /*PSINGLE_LIST_ENTRY
271 *FASTCALL
272 *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
273 * IN PKSPIN_LOCK Lock)
274 */
275 .global @ExfInterlockedPopEntryList@8
276 @ExfInterlockedPopEntryList@8:
277
278 /* Save flags and disable interrupts */
279 .start5:
280 pushfd
281 cli
282 ACQUIRE_SPINLOCK(edx, .spin5)
283
284 /* eax = first entry (NULL terminates a singly-linked list) */
285 mov eax, [ecx]
286
287 /* Check if it's empty */
288 or eax, eax
289 je 3f
290
291 /* Unlink: ListHead->Next = Entry->Next (edx = lock must be preserved on SMP) */
292 #ifdef CONFIG_SMP
293 push edx
294 #endif
295 mov edx, [eax]
296 mov [ecx], edx
297 #ifdef CONFIG_SMP
298 pop edx
299 #endif
300
301 2:
302 /* Release lock */
303 RELEASE_SPINLOCK(edx)
304
305 /* Restore flags */
306 popfd
307
308 /* Return */
309 ret
310
311 3:
312 /* Return NULL for an empty list */
313 xor eax, eax
314 jmp 2b
315
316 #ifdef CONFIG_SMP
317 .spin5:
318 popfd
319 SPIN_ON_LOCK(edx, .start5)
320 #endif
321
322 /*PSINGLE_LIST_ENTRY
323 *FASTCALL
324 *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
325 * IN PSINGLE_LIST_ENTRY ListEntry,
326 * IN PKSPIN_LOCK Lock)
327 */
328 .global @ExfInterlockedPushEntryList@12
329 @ExfInterlockedPushEntryList@12:
330
331 /* Save flags */
332 pushfd
333
334 /* SMP: save edx (ListEntry) and load the Lock pointer from the stack in its place */
335 #ifdef CONFIG_SMP
336 push edx
337 mov edx, [esp+12]
338 #endif
339
340 /* Disable interrupts */
341 .start6:
342 cli
343 #ifdef CONFIG_SMP
344 ACQUIRE_SPINLOCK(edx, .spin6)
345 pop edx
346 #endif
347
348 /* eax = old first entry */
349 mov eax, [ecx]
350
351 /* Push: Entry->Next = old head; ListHead->Next = Entry */
352 mov [edx], eax
353 mov [ecx], edx
354
355 /* Release lock (reload the Lock pointer; edx was restored to ListEntry above) */
356 #ifdef CONFIG_SMP
357 mov edx, [esp+8]
358 RELEASE_SPINLOCK(edx)
359 #endif
360
361 /* Restore flags */
362 popfd
363
364 /* Return */
365 ret 4
366
367 #ifdef CONFIG_SMP
368 .spin6:
369 /* Lock busy: restore ListEntry and flags, re-save them, reload the lock and spin */
370 pop edx
371 popfd
372 pushfd
373 push edx
374 mov edx, [esp+12]
375 SPIN_ON_LOCK(edx, .start6)
376 #endif
376
377 /*PSINGLE_LIST_ENTRY
378 *FASTCALL
379 *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
380 * IN PKSPIN_LOCK Lock)
381 */
382 .global @ExInterlockedPopEntrySList@8
383 .global @InterlockedPopEntrySList@4
384 .global _ExpInterlockedPopEntrySListResume@0
385 .global _ExpInterlockedPopEntrySListFault@0
386 .global _ExpInterlockedPopEntrySListEnd@0
387 @ExInterlockedPopEntrySList@8:
388 @InterlockedPopEntrySList@4:
389
390 /* Save registers */
391 push ebx
392 push ebp
393
394 /* Pointer to list */
395 mov ebp, ecx
396
397 /* Get the header: edx = depth/sequence dword, eax = Next pointer */
398 _ExpInterlockedPopEntrySListResume@0:
399 mov edx, [ebp+4]
400 mov eax, [ebp]
401
402 /* Check if the list is empty */
403 or eax, eax
404 jz 2f
405
406 /* ecx = new header dword with the depth (low word) decremented */
407 lea ecx, [edx-1]
408
409 /* This read may fault if another CPU popped and freed the entry; the exported Fault/Resume labels presumably let the trap handler restart the pop -- see their users */
410 _ExpInterlockedPopEntrySListFault@0:
411 mov ebx, [eax]
412 _ExpInterlockedPopEntrySListEnd@0:
413 LOCK cmpxchg8b [ebp]
414 jnz _ExpInterlockedPopEntrySListResume@0
415
416 /* Restore registers and return */
417 2:
418 pop ebp
419 pop ebx
420 ret
421
422 /*PSINGLE_LIST_ENTRY
423 *FASTCALL
424 *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
425 * IN PSINGLE_LIST_ENTRY ListEntry,
426 * IN PKSPIN_LOCK Lock)
427 */
428 .global @ExInterlockedPushEntrySList@12
429 @ExInterlockedPushEntrySList@12:
430
431 /* Overwrite the unused Lock stack argument with the return address, so the shared ret below also discards it */
432 pop [esp]
433
434 .global @InterlockedPushEntrySList@8
435 @InterlockedPushEntrySList@8:
436
437 /* Save registers */
438 push ebx
439 push ebp
440
441 /* ebp = list header, ebx = entry to push */
442 mov ebp, ecx
443 mov ebx, edx
444
445 /* Get the header: edx = depth/sequence dword, eax = Next pointer */
446 mov edx, [ebp+4]
447 mov eax, [ebp]
448
449 1:
450 /* Entry->Next = current head */
451 mov [ebx], eax
452
453 /* Bump depth (low word) and sequence (high word) in a single add */
454 lea ecx, [edx+0x10001]
455
456 /* Publish ecx:ebx if the header is still edx:eax, else reload and retry */
457 LOCK cmpxchg8b [ebp]
458 jnz 1b
459
460 /* Restore registers and return */
461 2:
462 pop ebp
463 pop ebx
464 ret
465
466 /*PSINGLE_LIST_ENTRY
467 *FASTCALL
468 *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)
469 */
470 .global @ExInterlockedFlushSList@4
471 @ExInterlockedFlushSList@4:
472
473 /* Save registers */
474 push ebx
475 push ebp
476
477 /* ebx = 0: the new (empty) Next pointer for the exchange */
478 xor ebx, ebx
479
480 /* Pointer to list */
481 mov ebp, ecx
482
483 /* Get the header: edx = depth/sequence dword, eax = Next pointer */
484 mov edx, [ebp+4]
485 mov eax, [ebp]
486
487 1:
488 /* Check if the list is empty */
489 or eax, eax
490 jz 2f
491
492 /* New header dword: keep the sequence (high word), zero the depth (low word) */
493 mov ecx, edx
494 mov cx, bx
495
496 /* Swap in the empty header if unchanged; old head stays in eax as the return value */
497 LOCK cmpxchg8b [ebp]
498 jnz 1b
499
500 /* Restore registers and return */
501 2:
502 pop ebp
503 pop ebx
504 ret
505
506 /*INTERLOCKED_RESULT
507 *FASTCALL
508 *Exfi386InterlockedIncrementLong(IN PLONG Addend)
509 */
510 .global @Exfi386InterlockedIncrementLong@4
511 @Exfi386InterlockedIncrementLong@4:
512
513 /* Atomically increment the addend */
514 LOCK add dword ptr [ecx], 1
515
516 /* lahf copies the status flags into ah; mask with EFLAG_SELECT to form the INTERLOCKED_RESULT */
517 lahf
518 and eax, EFLAG_SELECT
519 ret
520
521 /*INTERLOCKED_RESULT
522 *FASTCALL
523 *Exfi386InterlockedDecrementLong(IN PLONG Addend)
524 */
525 .global @Exfi386InterlockedDecrementLong@4
526 @Exfi386InterlockedDecrementLong@4:
527
528 /* Atomically decrement the addend */
529 LOCK sub dword ptr [ecx], 1
530
531 /* lahf copies the status flags into ah; mask with EFLAG_SELECT to form the INTERLOCKED_RESULT */
532 lahf
533 and eax, EFLAG_SELECT
534 ret
535
536 /*ULONG
537 *FASTCALL
538 *Exfi386InterlockedExchangeUlong(IN PULONG Target,
539 * IN ULONG Value)
540 */
541 .global @Exfi386InterlockedExchangeUlong@8
542 @Exfi386InterlockedExchangeUlong@8:
543
544 #ifdef CONFIG_SMP
545 /* MP: xchg with a memory operand is implicitly locked; the old value comes back in edx */
546 xchg [ecx], edx
547 mov eax, edx
548 #else
549 /* UP: cmpxchg loop; on success eax holds the old value */
550 mov eax, [ecx]
551 1:
552 cmpxchg [ecx], edx
553 jnz 1b
554 #endif
555
556 /* Return */
557 ret
558
559 /*PVOID
560 *FASTCALL
561 *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,
562 * IN PLONGLONG Exchange,
563 * IN PLONGLONG Comperand)
564 */
565 .global @ExfInterlockedCompareExchange64@12
566 @ExfInterlockedCompareExchange64@12:
567
568 /* Save registers */
569 push ebx
570 push ebp
571
572 /* ebp = Destination, ecx:ebx = Exchange (from [edx]), edx:eax = Comperand (pointer at [esp+12] after two pushes) */
573 mov ebp, ecx
574 mov ebx, [edx]
575 mov ecx, [edx+4]
576 mov edx, [esp+12]
577 mov eax, [edx]
578 mov edx, [edx+4]
579
580 /* If [ebp] == edx:eax store ecx:ebx, else load [ebp] into edx:eax -- old value returned in edx:eax either way */
581 LOCK cmpxchg8b [ebp]
582
583 /* Restore volatiles */
584 pop ebp
585 pop ebx
586
587 /* Return */
588 ret 4
589
590 /*PVOID
591 *FASTCALL
592 *ExInterlockedCompareExchange64(IN PLONGLONG Destination,
593 * IN PLONGLONG Exchange,
594 * IN PLONGLONG Comperand,
595 * IN PKSPIN_LOCK Lock)
596 */
597 .global @ExInterlockedCompareExchange64@16
598 @ExInterlockedCompareExchange64@16:
599
600 /* Save non-volatile registers (paired with the pop ebp / pop ebx epilogue below) */
601 push ebx
602 push ebp
603
604 /* ebp = Destination, ecx:ebx = Exchange (from [edx]), edx:eax = Comperand (pointer at [esp+12] after two pushes) */
605 mov ebp, ecx
606 mov ebx, [edx]
607 mov ecx, [edx+4]
608 mov edx, [esp+12]
609 mov eax, [edx]
610 mov edx, [edx+4]
611
612 /* Atomic compare-exchange; old value ends up in edx:eax (the Lock argument is unused here) */
613 LOCK cmpxchg8b [ebp]
614
615 /* Restore volatiles */
616 pop ebp
617 pop ebx
618
619 /* Return, cleaning both stack arguments (Comperand and Lock) */
620 ret 8
621
622 /*** Non-586 functions ***/
623
624 /*PSINGLE_LIST_ENTRY
625 *FASTCALL
626 *ExfInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
627 * IN PKSPIN_LOCK Lock)
628 */
629 .global @ExfInterlockedPopEntrySList@8
630 @ExfInterlockedPopEntrySList@8:
631
632 /* Save flags */
633 .starta:
634 pushfd
635
636 /* Disable interrupts */
637 cli
638
639 /* Acquire lock */
640 ACQUIRE_SPINLOCK(edx, .spina)
641
642 /* Get the first entry and check if the list is empty */
643 mov eax, [ecx]
644 or eax, eax
645 jz 1f
646
647 /* Pop: ListHead->Next = Entry->Next (memory-to-memory via push/pop) */
648 push [eax]
649 pop [ecx]
650
651 /* Decrement list depth */
652 dec dword ptr [ecx+4]
653
654 1:
655 #ifdef CONFIG_SMP
656 /* Release spinlock */
657 RELEASE_SPINLOCK(edx)
658 #endif
659
660 /* Restore flags and return (both arguments were in registers, nothing to clean) */
661 popfd
662 ret 0
663
664 #ifdef CONFIG_SMP
665 .spina:
666 /* Restore flags and spin */
667 popfd
668 SPIN_ON_LOCK(edx, .starta)
669 #endif
670
671 /*PSINGLE_LIST_ENTRY
672 *FASTCALL
673 *ExfInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
674 * IN PSINGLE_LIST_ENTRY ListEntry,
675 * IN PKSPIN_LOCK Lock)
676 */
677 .global @ExfInterlockedPushEntrySList@12
678 @ExfInterlockedPushEntrySList@12:
679
680 /* Save flags */
681 .startb:
682 pushfd
683
684 /* Disable interrupts */
685 cli
686
687 /* Acquire lock on SMP (was #ifndef: SMP released a never-acquired lock with eax uninitialized) */
688 #ifdef CONFIG_SMP
689 mov eax, [esp+8]
690 ACQUIRE_SPINLOCK(eax, .spinb)
691 #endif
692
693 /* Push: Entry->Next = ListHead->Next (memory-to-memory via push/pop) */
694 push [ecx]
695
696 /* ListHead->Next = Entry */
697 pop [edx]
698 mov [ecx], edx
699
700 /* Increment list depth */
701 inc dword ptr [ecx+4]
702
703 #ifdef CONFIG_SMP
704 /* Release spinlock (eax still holds the lock pointer) */
705 RELEASE_SPINLOCK(eax)
706 #endif
707
708 /* Restore flags and return, cleaning the Lock stack argument */
709 popfd
710 ret 4
711
712 #ifdef CONFIG_SMP
713 .spinb:
714 /* Restore flags and spin; .startb reloads eax before re-acquiring */
715 popfd
716 SPIN_ON_LOCK(eax, .startb)
717 #endif
718
719 /*PVOID
720 *FASTCALL
721 *ExpInterlockedCompareExchange64(IN PLONGLONG Destination,
722 * IN PLONGLONG Exchange,
723 * IN PLONGLONG Comperand,
724 * IN PKSPIN_LOCK Lock)
725 */
726 .global @ExpInterlockedCompareExchange64@16
727 @ExpInterlockedCompareExchange64@16:
728
729 /* Save non-volatile registers (paired with the pop ebp / pop ebx epilogue below) */
730 push ebx
731 push ebp
732
733 /* ebp = Destination, ecx:ebx = Exchange (from [edx]), edx:eax = Comperand (pointer at [esp+12] after two pushes) */
734 mov ebp, ecx
735 mov ebx, [edx]
736 mov ecx, [edx+4]
737 mov edx, [esp+12]
738 mov eax, [edx]
739 mov edx, [edx+4]
740
741 #ifdef CONFIG_SMP
742 /* Save ESI so we can keep the KSPIN_LOCK pointer in it */
743 push esi
744
745 /* Save flags, load the lock ([esp+24] after four pushes), and disable interrupts */
746 pushfd
747 mov esi, [esp+24]
748 .startc:
749 cli
750
751 /* Acquire the spinlock */
752 ACQUIRE_SPINLOCK(esi, .spinc)
753 #else
754 /* Save flags and disable interrupts */
755 pushfd
756 cli
757 #endif
758
759 /* Compare the low dword */
760 cmp eax, [ebp]
761 jne NoMatch
762
763 /* Compare the high dword */
764 cmp edx, [ebp+4]
765 jne NoMatch
766
767 /* Match: store the exchange value */
768 mov [ebp], ebx
769 mov [ebp+4], ecx
770
771 AfterSave:
772 #ifdef CONFIG_SMP
773 /* Release lock, restore flags and esi */
774 RELEASE_SPINLOCK(esi)
775 popfd
776 pop esi
777 #else
778 popfd
779 #endif
780
781 /* Restore the other non-volatiles */
782 pop ebp
783 pop ebx
784
785 /* Return the old value in edx:eax, cleaning both stack arguments */
786 ret 8
787
788 NoMatch:
789 /* No match: return the current destination value */
790 mov eax, [ebp]
791 mov edx, [ebp+4]
792 jmp AfterSave
793
794 #ifdef CONFIG_SMP
795 .spinc:
796 /* Restore flags and spin */
797 popfd
798 pushfd
799 SPIN_ON_LOCK(esi, .startc)
800 #endif
801 /* EOF */