- Revert 49927 "Update to trunk" as it breaks KsStudio (again)
[reactos.git] / ntoskrnl / ex / i386 / fastinterlck_asm.S
1 /*
2 * COPYRIGHT: See COPYING in the top level directory
3 * PROJECT: ReactOS kernel
4 * FILE: ntoskrnl/ex/i386/fastinterlck_asm.S
5 * PURPOSE: FASTCALL Interlocked Functions
6 * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)
7 */
8
9 /* INCLUDES ******************************************************************/
10
11 #include <reactos/asm.h>
12 #include <ndk/asm.h>
13 #include <internal/i386/asmmacro.S>
14
15 /* FUNCTIONS ****************************************************************/
16
17 .code32
18 .text
19
20 /*
21 * NOTE: These functions must obey the following rules:
22 * - Acquire locks only on MP systems.
23 * - Be safe at HIGH_LEVEL (no paged access).
24 * - Preserve flags.
 * - Disable interrupts.
26 */
27
/*VOID
 *FASTCALL
 *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,
 *                               IN ULONG Increment)
 *
 * Adds Increment (EDX) to the 64-bit counter at *Addend (ECX).
 * Each 32-bit half is updated atomically, but the 64-bit value as a
 * whole is not read or written atomically (acceptable for statistics).
 */
.global @ExInterlockedAddLargeStatistic@8
@ExInterlockedAddLargeStatistic@8:

#ifdef CONFIG_SMP
    /* Atomically add to the low dword */
    lock add [ecx], edx

    /* Common case: no carry out of the low dword -> done */
    jb 1f
    ret

1:
    /* Propagate the carry into the high dword. CF is still set from
     * the add above (conditional jumps do not modify flags). */
    lock adc dword ptr [ecx+4], 0
#else
    /* UP: plain add with carry propagation; no bus lock needed */
    add dword ptr [ecx], edx
    adc dword ptr [ecx+4], 0
#endif
    /* Return */
    ret
54
/*ULONG
 *FASTCALL
 *ExfInterlockedAddUlong(IN PULONG Addend,
 *                       IN ULONG Increment,
 *                       IN PKSPIN_LOCK Lock)
 *
 * Adds Increment (EDX) to *Addend (ECX) under the spinlock (stack arg)
 * and returns the ORIGINAL value of *Addend in EAX.
 */
.global @ExfInterlockedAddUlong@12
@ExfInterlockedAddUlong@12:

    /* Save caller's flags (preserves the interrupt-enable state) */
    pushfd

#ifdef CONFIG_SMP
    /* EAX = spinlock address ([esp+8]: above EFLAGS and return addr) */
    mov eax, [esp+8]
.start1:
#endif
    /* Disable interrupts while the lock is held */
    cli

    /* Acquire lock (macro is a no-op on UP builds) */
    ACQUIRE_SPINLOCK(eax, .spin1)

    /* EAX = old value; then do the add */
    mov eax, [ecx]
    add [ecx], edx

#ifdef CONFIG_SMP
    /* Reload the lock address (EAX now holds the return value) and release */
    mov edx, [esp+8]
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags and return, popping the Lock argument */
    popfd
    ret 4

#ifdef CONFIG_SMP
.spin1:
    /* Re-enable interrupts while spinning, then retry the acquire */
    popfd
    pushfd
    SPIN_ON_LOCK(eax, .start1)
#endif
99
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
 *                             IN PLIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * Inserts ListEntry (EDX) at the head of the list (ECX) under the
 * spinlock (stack arg). Returns the previous first entry, or NULL if
 * the list was empty.
 */
.global @ExfInterlockedInsertHeadList@12
@ExfInterlockedInsertHeadList@12:

#ifdef CONFIG_SMP
    /* ESI (callee-saved) = spinlock address, kept across the insert */
    push esi
    mov esi, [esp+8]
#endif

    /* Save flags and disable interrupts */
    pushfd
.start2:
    cli

    /* Acquire lock (no-op on UP builds) */
    ACQUIRE_SPINLOCK(esi, .spin2)

    /* EAX = old first entry (Head->Flink) */
    mov eax, [ecx]

    /* Link in: Entry->Flink = old first, Entry->Blink = head,
     * Head->Flink = entry, old first->Blink = entry */
    mov [edx], eax
    mov [edx+4], ecx
    mov [ecx], edx
    mov [eax+4], edx

    /* Release lock and restore flags */
    RELEASE_SPINLOCK(esi)
    popfd

#ifdef CONFIG_SMP
    pop esi
#endif

    /* If the old first entry was the head itself the list was empty:
     * EAX ^ ECX == 0 -> return NULL; otherwise undo the XOR */
    xor eax, ecx
    jz 2f

    xor eax, ecx
2:
    ret 4

#ifdef CONFIG_SMP
.spin2:
    /* Re-enable interrupts while spinning, then retry */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .start2)
#endif
155
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
 *                             IN PLIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * Inserts ListEntry (EDX) at the tail of the list (ECX) under the
 * spinlock (stack arg). Returns the previous last entry, or NULL if
 * the list was empty.
 */
.global @ExfInterlockedInsertTailList@12
@ExfInterlockedInsertTailList@12:

#ifdef CONFIG_SMP
    /* ESI (callee-saved) = spinlock address, kept across the insert */
    push esi
    mov esi, [esp+8]
#endif

    /* Save flags and disable interrupts */
    pushfd
.start3:
    cli

    /* Acquire lock (no-op on UP builds) */
    ACQUIRE_SPINLOCK(esi, .spin3)

    /* EAX = old last entry (Head->Blink) */
    mov eax, [ecx+4]

    /* Link in: Entry->Flink = head, Entry->Blink = old last,
     * Head->Blink = entry, old last->Flink = entry */
    mov [edx], ecx
    mov [edx+4], eax
    mov [ecx+4], edx
    mov [eax], edx

    /* Release lock and restore flags */
    RELEASE_SPINLOCK(esi)
    popfd

#ifdef CONFIG_SMP
    pop esi
#endif

    /* If the old last entry was the head itself the list was empty:
     * EAX ^ ECX == 0 -> return NULL; otherwise undo the XOR */
    xor eax, ecx
    jz 2f

    xor eax, ecx
2:
    ret 4

#ifdef CONFIG_SMP
.spin3:
    /* Re-enable interrupts while spinning, then retry */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .start3)
#endif
211
/*PLIST_ENTRY
 *FASTCALL
 *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,
 *                             IN PKSPIN_LOCK Lock)
 *
 * Removes and returns the first entry of the list (ECX) under the
 * spinlock (EDX). Returns NULL if the list is empty.
 */
.global @ExfInterlockedRemoveHeadList@8
@ExfInterlockedRemoveHeadList@8:

    /* Save flags and disable interrupts */
.start4:
    pushfd
    cli
    ACQUIRE_SPINLOCK(edx, .spin4)

    /* EAX = first entry */
    mov eax, [ecx]

    /* Head pointing at itself means the list is empty */
    cmp eax, ecx
    je 2f

    /* Unlink: Head->Flink = first->Flink, first->Flink->Blink = head.
     * On SMP use EBX as scratch so EDX keeps the lock address. */
#ifdef CONFIG_SMP
    push ebx
    mov ebx, [eax]
    mov [ecx], ebx
    mov [ebx+4], ecx
#else
    mov edx, [eax]
    mov [ecx], edx
    mov [edx+4], ecx
#endif

    /* Release lock (no-op on UP) */
#ifdef CONFIG_SMP
    RELEASE_SPINLOCK(edx)
    pop ebx
#endif

    /* Restore flags */
    popfd

    /* Return the removed entry in EAX */
    ret

2:
    /* Empty list: release lock (no-op on UP) */
    RELEASE_SPINLOCK(edx)

    /* Restore flags */
    popfd

    /* Return NULL */
    xor eax, eax
    ret

#ifdef CONFIG_SMP
.spin4:
    /* Restore flags and spin until the lock looks free, then retry */
    popfd
    SPIN_ON_LOCK(edx, .start4)
#endif
273
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
 *                           IN PKSPIN_LOCK Lock)
 *
 * Pops the first entry off the singly-linked list (ECX) under the
 * spinlock (EDX). Returns NULL if the list is empty.
 */
.global @ExfInterlockedPopEntryList@8
@ExfInterlockedPopEntryList@8:

    /* Save flags and disable interrupts */
.start5:
    pushfd
    cli
    ACQUIRE_SPINLOCK(edx, .spin5)

    /* EAX = first entry; NULL means the list is empty */
    mov eax, [ecx]
    or eax, eax
    je 3f

    /* Unlink: Head->Next = first->Next. On SMP, EDX holds the lock
     * address, so preserve it around the scratch use. */
#ifdef CONFIG_SMP
    push edx
#endif
    mov edx, [eax]
    mov [ecx], edx
#ifdef CONFIG_SMP
    pop edx
#endif

2:
    /* Release lock (no-op on UP) */
    RELEASE_SPINLOCK(edx)

    /* Restore flags */
    popfd

    /* Return the popped entry (or NULL) in EAX */
    ret

3:
    /* Empty list: return NULL via the common exit path */
    xor eax, eax
    jmp 2b

#ifdef CONFIG_SMP
.spin5:
    /* Restore flags and spin, then retry */
    popfd
    SPIN_ON_LOCK(edx, .start5)
#endif
325
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PSINGLE_LIST_ENTRY ListEntry,
 *                            IN PKSPIN_LOCK Lock)
 *
 * Pushes ListEntry (EDX) onto the front of the singly-linked list
 * (ECX) under the spinlock (stack arg). Returns the previous first
 * entry in EAX.
 */
.global @ExfInterlockedPushEntryList@12
@ExfInterlockedPushEntryList@12:

    /* Save flags */
    pushfd

    /* On SMP, borrow EDX for the lock pointer; save the entry first.
     * [esp+12] = Lock (above saved EDX, EFLAGS and return address). */
#ifdef CONFIG_SMP
    push edx
    mov edx, [esp+12]
#endif

    /* Disable interrupts */
.start6:
    cli
#ifdef CONFIG_SMP
    ACQUIRE_SPINLOCK(edx, .spin6)
    pop edx
#endif

    /* EAX = old first entry */
    mov eax, [ecx]

    /* Entry->Next = old first; Head->Next = entry */
    mov [edx], eax
    mov [ecx], edx

    /* Release lock: reload its address (EDX was restored to the entry) */
#ifdef CONFIG_SMP
    mov edx, [esp+8]
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags */
    popfd

    /* Return old first entry, popping the Lock argument */
    ret 4

#ifdef CONFIG_SMP
.spin6:
    /* Undo the acquire setup, re-enable interrupts while spinning,
     * then rebuild the saved state and retry */
    pop edx
    popfd
    pushfd
    push edx
    mov edx, [esp+12]
    SPIN_ON_LOCK(edx, .start6)
#endif
380
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                           IN PKSPIN_LOCK Lock)
 *
 * Lock-free SList pop using CMPXCHG8B; the Lock argument is unused on
 * cmpxchg8b-capable CPUs. The Resume/Fault/End labels are exported,
 * presumably so the trap handler can recognize and restart a pop that
 * faulted reading a concurrently-freed entry — confirm against the
 * fault-handler code.
 */
.global @ExInterlockedPopEntrySList@8
.global @InterlockedPopEntrySList@4
.global _ExpInterlockedPopEntrySListResume@0
.global _ExpInterlockedPopEntrySListFault@0
.global _ExpInterlockedPopEntrySListEnd@0
@ExInterlockedPopEntrySList@8:
@InterlockedPopEntrySList@4:

    /* Save callee-saved registers */
    push ebx
    push ebp

    /* EBP = list header */
    mov ebp, ecx

    /* Snapshot the header: EDX = depth/sequence dword, EAX = first entry */
_ExpInterlockedPopEntrySListResume@0:
    mov edx, [ebp+4]
    mov eax, [ebp]

    /* NULL head -> empty list, return NULL */
    or eax, eax
    jz 2f

    /* ECX = header dword with the depth (low word) decremented */
    lea ecx, [edx-1]

    /* EBX = first->Next; this load is the one that can fault */
_ExpInterlockedPopEntrySListFault@0:
    mov ebx, [eax]
_ExpInterlockedPopEntrySListEnd@0:
    /* Install ECX:EBX iff the header still equals the EDX:EAX
     * snapshot (ZF set on success); otherwise retry from the top */
    LOCK cmpxchg8b qword ptr [ebp]
    jnz _ExpInterlockedPopEntrySListResume@0

    /* Restore registers; EAX = popped entry (or NULL) */
2:
    pop ebp
    pop ebx
    ret
425
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PSINGLE_LIST_ENTRY ListEntry,
 *                            IN PKSPIN_LOCK Lock)
 *
 * Lock-free SList push using CMPXCHG8B; the Lock argument is unused.
 */
.global @ExInterlockedPushEntrySList@12
@ExInterlockedPushEntrySList@12:

    /* Discard the unused stack Lock argument by popping the return
     * address into its slot, so we can fall through to the @8 entry
     * (which expects no stack arguments) */
    pop [esp]

.global @InterlockedPushEntrySList@8
@InterlockedPushEntrySList@8:

    /* Save callee-saved registers */
    push ebx
    push ebp

    /* EBP = list header, EBX = entry being pushed */
    mov ebp, ecx
    mov ebx, edx

    /* Snapshot the header: EDX = depth/sequence dword, EAX = first entry */
    mov edx, [ebp+4]
    mov eax, [ebp]

1:
    /* Entry->Next = current first entry */
    mov [ebx], eax

    /* ECX = header dword with depth and sequence each bumped
     * (0x10001 increments both 16-bit fields at once) */
    lea ecx, [edx+0x10001]

    /* Install ECX:EBX iff the header still equals EDX:EAX; on failure
     * cmpxchg8b reloads EDX:EAX with the current header, so just retry */
    LOCK cmpxchg8b qword ptr [ebp]
    jnz 1b

    /* Restore registers; EAX = previous first entry */
2:
    pop ebp
    pop ebx
    ret
469
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)
 *
 * Atomically detaches the entire SList chain, leaving an empty header
 * (NULL next pointer, depth 0, sequence preserved). Returns the old
 * first entry, or NULL if the list was already empty.
 */
.global @ExInterlockedFlushSList@4
@ExInterlockedFlushSList@4:

    /* Save callee-saved registers */
    push ebx
    push ebp

    /* EBX = 0: the new Next pointer, and a zero source for CX below */
    xor ebx, ebx

    /* EBP = list header */
    mov ebp, ecx

    /* Snapshot the header: EDX = depth/sequence dword, EAX = first entry */
    mov edx, [ebp+4]
    mov eax, [ebp]

1:
    /* Already empty -> return NULL */
    or eax, eax
    jz 2f

    /* ECX = header dword with the sequence kept and depth zeroed */
    mov ecx, edx
    mov cx, bx

    /* Install the empty header iff it still equals EDX:EAX; on failure
     * EDX:EAX is reloaded with the current header, so retry */
    LOCK cmpxchg8b qword ptr [ebp]
    jnz 1b

    /* Restore registers; EAX = old first entry (or NULL) */
2:
    pop ebp
    pop ebx
    ret
509
/*INTERLOCKED_RESULT
 *FASTCALL
 *Exfi386InterlockedIncrementLong(IN PLONG Addend)
 *
 * Atomically increments *Addend (ECX) and returns the result class
 * encoded in the flag bits selected by EFLAG_SELECT (from ndk/asm.h).
 */
.global @Exfi386InterlockedIncrementLong@4
@Exfi386InterlockedIncrementLong@4:

    /* Atomic increment; "add ..., 1" also updates CF, unlike "inc" */
    LOCK add dword ptr [ecx], 1

    /* AH = low byte of EFLAGS; mask to the selected flag bits */
    lahf
    and eax, EFLAG_SELECT
    ret
524
/*INTERLOCKED_RESULT
 *FASTCALL
 *Exfi386InterlockedDecrementLong(IN PLONG Addend)
 *
 * Atomically decrements *Addend (ECX) and returns the result class
 * encoded in the flag bits selected by EFLAG_SELECT (from ndk/asm.h).
 */
.global @Exfi386InterlockedDecrementLong@4
@Exfi386InterlockedDecrementLong@4:

    /* Atomic decrement; "sub ..., 1" also updates CF, unlike "dec" */
    LOCK sub dword ptr [ecx], 1

    /* AH = low byte of EFLAGS; mask to the selected flag bits */
    lahf
    and eax, EFLAG_SELECT
    ret
539
/*ULONG
 *FASTCALL
 *Exfi386InterlockedExchangeUlong(IN PULONG Target,
 *                                IN ULONG Value)
 *
 * Atomically exchanges *Target (ECX) with Value (EDX) and returns the
 * old value in EAX.
 */
.global @Exfi386InterlockedExchangeUlong@8
@Exfi386InterlockedExchangeUlong@8:

#ifdef CONFIG_SMP
    /* MP: XCHG with a memory operand asserts the bus lock implicitly */
    xchg [ecx], edx
    mov eax, edx
#else
    /* UP: cmpxchg loop; on failure EAX is reloaded with the current
     * value, so the retry uses a fresh snapshot */
    mov eax, [ecx]
1:
    cmpxchg [ecx], edx
    jnz 1b
#endif

    /* Return the old value */
    ret
562
/*LONGLONG
 *FASTCALL
 *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,
 *                                IN PLONGLONG Exchange,
 *                                IN PLONGLONG Comperand)
 *
 * Lock-free 64-bit compare-exchange: if *Destination == *Comperand,
 * store *Exchange. Returns the old value of *Destination in EDX:EAX.
 */
.global @ExfInterlockedCompareExchange64@12
@ExfInterlockedCompareExchange64@12:

    /* Save callee-saved registers (both are clobbered below) */
    push ebx
    push ebp

    /* EBP = Destination, ECX:EBX = exchange value,
     * EDX:EAX = comperand value (loaded via the stack pointer arg) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

    /* Compare EDX:EAX with *Destination; on match store ECX:EBX,
     * otherwise EDX:EAX receives the current value */
    LOCK cmpxchg8b qword ptr [ebp]

    /* Restore callee-saved registers */
    pop ebp
    pop ebx

    /* Return, popping the Comperand argument */
    ret 4
593
/*LONGLONG
 *FASTCALL
 *ExInterlockedCompareExchange64(IN PLONGLONG Destination,
 *                               IN PLONGLONG Exchange,
 *                               IN PLONGLONG Comperand,
 *                               IN PKSPIN_LOCK Lock)
 *
 * Lock-free 64-bit compare-exchange; the spinlock argument is unused
 * on cmpxchg8b-capable CPUs and is simply popped by "ret 8".
 * Returns the old value of *Destination in EDX:EAX.
 */
.global @ExInterlockedCompareExchange64@16
@ExInterlockedCompareExchange64@16:

    /* Save callee-saved registers. FIX: this previously pushed EBP
     * twice while popping EBP then EBX, so the caller's EBX was never
     * saved and came back holding the caller's EBP. */
    push ebx
    push ebp

    /* EBP = Destination, ECX:EBX = exchange value,
     * EDX:EAX = comperand value (loaded via the stack pointer arg) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

    /* Compare EDX:EAX with *Destination; on match store ECX:EBX,
     * otherwise EDX:EAX receives the current value */
    LOCK cmpxchg8b qword ptr [ebp]

    /* Restore callee-saved registers */
    pop ebp
    pop ebx

    /* Return, popping the Comperand and Lock arguments */
    ret 8
625
/*** Non-586 functions (spinlock-based fallbacks for CPUs without CMPXCHG8B) ***/

/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                            IN PKSPIN_LOCK Lock)
 *
 * Spinlock-based SList pop. Returns the popped entry, or NULL if the
 * list is empty.
 */
.global @ExfInterlockedPopEntrySList@8
@ExfInterlockedPopEntrySList@8:

    /* Save flags */
.starta:
    pushfd

    /* Disable interrupts */
    cli

    /* Acquire lock (EDX = spinlock; macro is a no-op on UP builds) */
    ACQUIRE_SPINLOCK(edx, .spina)

    /* EAX = first entry; NULL means the list is empty */
    mov eax, [ecx]
    or eax, eax
    jz 1f

    /* Head->Next = first->Next (memory-to-memory via push/pop) */
    push [eax]
    pop [ecx]

    /* Decrement list depth */
    dec dword ptr [ecx+4]

1:
#ifdef CONFIG_SMP
    /* Release spinlock */
    RELEASE_SPINLOCK(edx)
#endif

    /* Restore flags and return the entry (or NULL) in EAX */
    popfd
    ret 0

#ifdef CONFIG_SMP
.spina:
    /* Restore flags and spin, then retry */
    popfd
    SPIN_ON_LOCK(edx, .starta)
#endif
674
/*PSINGLE_LIST_ENTRY
 *FASTCALL
 *ExfInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
 *                             IN PSINGLE_LIST_ENTRY ListEntry,
 *                             IN PKSPIN_LOCK Lock)
 *
 * Spinlock-based SList push for CPUs without CMPXCHG8B.
 * NOTE(review): the NT contract returns the previous first entry, but
 * this implementation never loads it into EAX — confirm callers
 * ignore the return value.
 */
.global @ExfInterlockedPushEntrySList@12
@ExfInterlockedPushEntrySList@12:

    /* Save flags */
.startb:
    pushfd

    /* Disable interrupts */
    cli

    /* Acquire the spinlock on MP builds only. FIX: this previously
     * read "#ifndef CONFIG_SMP", so SMP builds skipped the acquire yet
     * still executed the MP-only RELEASE_SPINLOCK below with EAX
     * undefined, while UP builds did a pointless lock load. */
#ifdef CONFIG_SMP
    mov eax, [esp+8]
    ACQUIRE_SPINLOCK(eax, .spinb)
#endif

    /* Entry->Next = old first entry (memory-to-memory via push/pop) */
    push [ecx]
    pop [edx]

    /* Head->Next = new entry */
    mov [ecx], edx

    /* Increment list depth */
    inc dword ptr [ecx+4]

#ifdef CONFIG_SMP
    /* Release spinlock */
    RELEASE_SPINLOCK(eax)
#endif

    /* Restore flags and return, popping the Lock argument */
    popfd
    ret 4

#ifdef CONFIG_SMP
.spinb:
    /* Restore flags and spin, then retry */
    popfd
    SPIN_ON_LOCK(eax, .startb)
#endif
722
/*LONGLONG
 *FASTCALL
 *ExpInterlockedCompareExchange64(IN PLONGLONG Destination,
 *                                IN PLONGLONG Exchange,
 *                                IN PLONGLONG Comperand,
 *                                IN PKSPIN_LOCK Lock)
 *
 * Spinlock-guarded 64-bit compare-exchange for CPUs without CMPXCHG8B.
 * Returns the old value of *Destination in EDX:EAX.
 */
.global @ExpInterlockedCompareExchange64@16
@ExpInterlockedCompareExchange64@16:

    /* Save callee-saved registers. FIX: this previously pushed EBP
     * twice while popping EBP then EBX, so the caller's EBX was never
     * saved and came back holding the caller's EBP. */
    push ebx
    push ebp

    /* EBP = Destination, ECX:EBX = exchange value,
     * EDX:EAX = comperand value (loaded via the stack pointer arg) */
    mov ebp, ecx
    mov ebx, [edx]
    mov ecx, [edx+4]
    mov edx, [esp+12]
    mov eax, [edx]
    mov edx, [edx+4]

#ifdef CONFIG_SMP
    /* ESI (callee-saved) holds the spinlock address across the sequence */
    push esi

    /* Save flags, load the lock pointer, and disable interrupts */
    pushfd
    mov esi, [esp+24]
.startc:
    cli

    /* Acquire the spinlock */
    ACQUIRE_SPINLOCK(esi, .spinc)
#else
    /* UP: disabling interrupts makes the compare/store atomic */
    pushfd
    cli
#endif

    /* Compare low dword */
    cmp eax, [ebp]
    jne NoMatch

    /* Compare high dword */
    cmp edx, [ebp+4]
    jne NoMatch

    /* Match: store the exchange value; EDX:EAX already holds the old
     * (== comperand) value for the return */
    mov [ebp], ebx
    mov [ebp+4], ecx

AfterSave:
#ifdef CONFIG_SMP
    /* Release lock, restore flags and ESI */
    RELEASE_SPINLOCK(esi)
    popfd
    pop esi
#else
    popfd
#endif

    /* Restore the other callee-saved registers */
    pop ebp
    pop ebx

    /* Return, popping the Comperand and Lock arguments */
    ret 8

NoMatch:
    /* No match: return the current value of *Destination */
    mov eax, [ebp]
    mov edx, [ebp+4]
    jmp AfterSave

#ifdef CONFIG_SMP
.spinc:
    /* Re-enable interrupts while spinning, then retry the acquire */
    popfd
    pushfd
    SPIN_ON_LOCK(esi, .startc)
#endif
805 /* EOF */