* The functions now disable interrupts as they should. This is a core functionality of the ExInterlocked functions which we did not do.
* The functions now preserve EFLAGS as they should. This is another core functionality and point of ExInterlocked functions, which we did not do.
* Use KiAcquire/ReleaseSpinlock equivalents instead of going through HAL. Also make them macros which can be later used in other assembly files.
* Write optimized versions for non-SMP machines. Namely, the spinlock macros are coded to be no-ops on UP machines, since they are not needed.
* Write all functions in assembly, so we can cleanly optimize them and properly restore/save EFLAGS and use CLI.
- Use the right interlocked macro in guarded mutex implementation.
- Fix export of Exi386InterlockedExchangeUlong
- Use documented EFLAGS definition instead of hard-coded value in some interlocked functions.
svn path=/trunk/; revision=20474
#define EFLAGS_ALIGN_CHECK 0x40000
#define EFLAGS_VIF 0x80000
#define EFLAGS_VIP 0x100000
+#define EFLAG_SIGN 0x8000
+#define EFLAG_ZERO 0x4000
+#define EFLAG_SELECT (EFLAG_SIGN | EFLAG_ZERO)
//
// CR0
--- /dev/null
+/*\r
+ * COPYRIGHT: See COPYING in the top level directory\r
+ * PROJECT: ReactOS kernel\r
+ * FILE: ntoskrnl/ex/i386/fastinterlck_asm.S\r
+ * PURPOSE: FASTCALL Interlocked Functions\r
+ * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)\r
+ */\r
+ \r
+/* INCLUDES ******************************************************************/\r
+#include <ndk/asm.h>\r
+.intel_syntax noprefix\r
+\r
+//#define CONFIG_SMP\r
+\r
+#ifdef CONFIG_SMP\r
+#define LOCK lock\r
+#define ACQUIRE_SPINLOCK(x, y) \\r
+ lock bts dword ptr [x], 0; \\r
+ jb y\r
+#define RELEASE_SPINLOCK(x) mov byte ptr [x], 0\r
+#define SPIN_ON_LOCK(x, y) \\r
+1: \\r
+ test dword ptr [x], 1; \\r
+ jz y; \\r
+ pause; \\r
+ jmp 1b\r
+#else\r
+#define LOCK\r
+#define ACQUIRE_SPINLOCK(x, y)\r
+#define RELEASE_SPINLOCK(x) \r
+#endif\r
+\r
+/* FUNCTIONS ****************************************************************/\r
+\r
+/*\r
+ * NOTE: These functions must obey the following rules:\r
+ * - Acquire locks only on MP systems.\r
+ * - Be safe at HIGH_LEVEL (no paged access).\r
+ * - Preserve flags.\r
+ * - Disable interrupts.\r
+ */\r
+\r
+/*VOID\r
+ *FASTCALL\r
+ *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,\r
+ * IN ULONG Increment)\r
+ */\r
+.global @ExInterlockedAddLargeStatistic@8\r
+@ExInterlockedAddLargeStatistic@8:\r
+\r
+#ifdef CONFIG_SMP\r
+ /* Do the addition */\r
+ lock add [ecx], edx\r
+\r
+ /* Check for carry bit and return */\r
+ jb 1f\r
+ ret\r
+\r
+1:\r
+ /* Add carry */\r
+ lock adc dword ptr [ecx+4], 0\r
+#else\r
+ /* Do the addition and add the carry */\r
+ add dword ptr [ecx], edx\r
+ adc dword ptr [ecx+4], 0\r
+#endif\r
+ /* Return */\r
+ ret\r
+\r
+/*ULONG\r
+ *FASTCALL\r
+ *ExfInterlockedAddUlong(IN PULONG Addend,\r
+ * IN ULONG Increment,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedAddUlong@12\r
+@ExfInterlockedAddUlong@12:\r
+\r
+ /* Save flags */\r
+ pushfd\r
+\r
+#ifdef CONFIG_SMP\r
+ /* Get lock address */\r
+ mov eax, [esp+8]\r
+.start1:\r
+#endif\r
+ /* Disable interrupts */\r
+ cli\r
+\r
+ /* Acquire lock */\r
+ ACQUIRE_SPINLOCK(eax, .spin1)\r
+\r
+ /* Do the add */\r
+ mov eax, [ecx]\r
+ add [ecx], edx\r
+\r
+#ifdef CONFIG_SMP\r
+ /* Get spinlock address and release it */\r
+ mov edx, [esp+8]\r
+ RELEASE_SPINLOCK(edx)\r
+#endif\r
+\r
+ /* Restore flags and return */\r
+ popfd\r
+ ret 4\r
+\r
+#ifdef CONFIG_SMP\r
+.spin1:\r
+ /* Restore flags and spin */\r
+ popfd\r
+ pushfd\r
+ SPIN_ON_LOCK(eax, .start1)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *FASTCALL\r
+ *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,\r
+ * IN PLIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedInsertHeadList@12\r
+@ExfInterlockedInsertHeadList@12:\r
+\r
+#ifdef CONFIG_SMP\r
+ /* Save lock address */\r
+ push esi\r
+ mov esi, [esp+8]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+ pushfd\r
+.start2:\r
+ cli\r
+\r
+ /* Acquire lock */\r
+ ACQUIRE_SPINLOCK(esi, .spin2)\r
+\r
+ /* Get list pointer */\r
+ mov eax, [ecx]\r
+\r
+ /* Do the insert */\r
+ mov [edx], eax\r
+ mov [edx+4], ecx\r
+ mov [ecx], edx\r
+ mov [eax+4], edx\r
+\r
+ /* Release lock and restore flags */\r
+ RELEASE_SPINLOCK(esi)\r
+ popfd\r
+\r
+#ifdef CONFIG_SMP\r
+ pop esi\r
+#endif\r
+\r
+ /* Check if list was empty */\r
+ xor eax, ecx\r
+ jz 2f\r
+\r
+ /* Return list pointer */\r
+ xor eax, ecx\r
+2:\r
+ ret 4\r
+\r
+#ifdef CONFIG_SMP\r
+.spin2:\r
+ popfd\r
+ pushfd\r
+ SPIN_ON_LOCK(esi, .start2)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *NTAPI\r
+ *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,\r
+ * IN PLIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedInsertTailList@12\r
+@ExfInterlockedInsertTailList@12:\r
+\r
+#ifdef CONFIG_SMP\r
+ /* Save lock address */\r
+ push esi\r
+ mov esi, [esp+8]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+ pushfd\r
+.start3:\r
+ cli\r
+\r
+ /* Acquire lock */\r
+ ACQUIRE_SPINLOCK(esi, .spin3)\r
+\r
+ /* Get list pointer */\r
+ mov eax, [ecx+4]\r
+\r
+ /* Do the insert */\r
+ mov [edx], ecx\r
+ mov [edx+4], eax\r
+ mov [ecx+4], edx\r
+ mov [eax], edx\r
+\r
+ /* Release lock and restore flags */\r
+ RELEASE_SPINLOCK(esi)\r
+ popfd\r
+\r
+#ifdef CONFIG_SMP\r
+ pop esi\r
+#endif\r
+\r
+ /* Check if list was empty */\r
+ xor eax, ecx\r
+ jz 2f\r
+\r
+ /* Return list pointer */\r
+ xor eax, ecx\r
+2:\r
+ ret 4\r
+\r
+#ifdef CONFIG_SMP\r
+.spin3:\r
+ popfd\r
+ pushfd\r
+ SPIN_ON_LOCK(esi, .start3)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *FASTCALL\r
+ *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedRemoveHeadList@8\r
+@ExfInterlockedRemoveHeadList@8:\r
+\r
+ /* Save flags and disable interrupts */\r
+.start4:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin4)\r
+\r
+ /* Get list pointer */\r
+ mov eax, [ecx]\r
+\r
+ /* Check if it's empty */\r
+ cmp eax, ecx\r
+ je 2f\r
+\r
+ /* Get the next entry and do the deletion*/\r
+#ifdef CONFIG_SMP\r
+ push ebx\r
+ mov ebx, [eax]\r
+ mov [ecx], ebx\r
+ mov [ebx+4], ecx\r
+#else\r
+ mov edx, [eax]\r
+ mov [ecx], edx\r
+ mov [edx+4], ecx\r
+#endif\r
+\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ RELEASE_SPINLOCK(edx)\r
+ pop ebx\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret\r
+\r
+2:\r
+ /* Release lock */\r
+ RELEASE_SPINLOCK(edx)\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return empty list */\r
+ xor eax, eax\r
+ ret\r
+\r
+#ifdef CONFIG_SMP\r
+.spin4:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start4)\r
+#endif\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *FASTCALL\r
+ *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedPopEntryList@8\r
+@ExfInterlockedPopEntryList@8:\r
+\r
+ /* Save flags and disable interrupts */\r
+.start5:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin5)\r
+\r
+ /* Get list pointer */\r
+ mov eax, [ecx]\r
+\r
+ /* Check if it's empty */\r
+ or eax, eax\r
+ je 3f\r
+\r
+ /* Get next entry and do deletion */\r
+#ifdef CONFIG_SMP\r
+ push edx\r
+#endif\r
+ mov edx, [eax]\r
+ mov [ecx], edx\r
+#ifdef CONFIG_SMP\r
+ pop edx\r
+#endif\r
+\r
+2:\r
+ /* Release lock */\r
+ RELEASE_SPINLOCK(edx)\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret\r
+\r
+3:\r
+ /* Return empty list */\r
+ xor eax, eax\r
+ jmp 2b\r
+\r
+#ifdef CONFIG_SMP\r
+.spin5:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start5)\r
+#endif\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PSINGLE_LIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExfInterlockedPushEntryList@12\r
+@ExfInterlockedPushEntryList@12:\r
+\r
+ /* Save flags */\r
+ pushfd\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ push edx\r
+ mov edx, [esp+12]\r
+#endif\r
+\r
+ /* Disable interrupts */\r
+.start6:\r
+ cli\r
+#ifdef CONFIG_SMP\r
+ ACQUIRE_SPINLOCK(edx, .spin6)\r
+ pop edx\r
+#endif\r
+\r
+ /* Get list pointer */\r
+ mov eax, [ecx]\r
+\r
+ /* Do push */\r
+ mov [edx], eax\r
+ mov [ecx], edx\r
+\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+8]\r
+ RELEASE_SPINLOCK(edx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret 4\r
+\r
+#ifdef CONFIG_SMP\r
+.spin6:\r
+ pop edx\r
+ popfd\r
+ pushfd\r
+ push edx\r
+ mov edx, [esp+12]\r
+ SPIN_ON_LOCK(edx, .start6)\r
+#endif\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExInterlockedPopEntrySList@8\r
+.global @InterlockedPopEntrySList@4\r
+@ExInterlockedPopEntrySList@8:\r
+@InterlockedPopEntrySList@4:\r
+\r
+ /* Save registers */\r
+ push ebx\r
+ push ebp\r
+\r
+ /* Pointer to list */\r
+ mov ebp, ecx\r
+\r
+ /* Get sequence number and link pointer */\r
+ mov edx, [ebp+4]\r
+ mov eax, [ebp]\r
+\r
+1:\r
+ /* Check if the list is empty */\r
+ or eax, eax\r
+ jz 2f\r
+\r
+ /* Copy sequence number and adjust it */\r
+ lea ecx, [edx-1]\r
+\r
+ /* Get next pointer and do the exchange */\r
+ mov ebx, [eax]\r
+ LOCK cmpxchg8b qword ptr [ebp]\r
+ jnz 1b\r
+\r
+ /* Restore registers and return */\r
+2:\r
+ pop ebp\r
+ pop ebx\r
+ ret\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PSINGLE_LIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExInterlockedPushEntrySList@12\r
+@ExInterlockedPushEntrySList@12:\r
+\r
+ /* So we can fall through below */\r
+ pop [esp]\r
+\r
+.global @InterlockedPushEntrySList@8\r
+@InterlockedPushEntrySList@8:\r
+\r
+ /* Save registers */\r
+ push ebx\r
+ push ebp\r
+\r
+ /* Pointer to list */\r
+ mov ebp, ecx\r
+ mov ebx, edx\r
+\r
+ /* Get sequence number and link pointer */\r
+ mov edx, [ebp+4]\r
+ mov eax, [ebp]\r
+\r
+1:\r
+ /* Set link pointer */\r
+ mov [ebx], eax\r
+\r
+ /* Copy sequence number and adjust it */\r
+ lea ecx, [edx+0x10001]\r
+\r
+ /* Do the exchange */\r
+ LOCK cmpxchg8b qword ptr [ebp]\r
+ jnz 1b\r
+\r
+ /* Restore registers and return */\r
+2:\r
+ pop ebp\r
+ pop ebx\r
+ ret\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)\r
+ */\r
+.global @ExInterlockedFlushSList@4\r
+@ExInterlockedFlushSList@4:\r
+\r
+ /* Save registers */\r
+ push ebx\r
+ push ebp\r
+\r
+ /* Clear ebx */\r
+ xor ebx, ebx\r
+\r
+ /* Pointer to list */\r
+ mov ebp, ecx\r
+\r
+ /* Get sequence number and link pointer */\r
+ mov edx, [ebp+4]\r
+ mov eax, [ebp]\r
+\r
+1:\r
+ /* Check if the list is empty */\r
+ or eax, eax\r
+ jz 2f\r
+\r
+ /* Clear sequence and pointer */\r
+ mov ecx, edx\r
+ mov cx, bx\r
+\r
+ /* Do the exchange */\r
+ LOCK cmpxchg8b qword ptr [ebp]\r
+ jnz 1b\r
+\r
+ /* Restore registers and return */\r
+2:\r
+ pop ebp\r
+ pop ebx\r
+ ret\r
+\r
+/*INTERLOCKED_RESULT\r
+ *FASTCALL\r
+ *Exfi386InterlockedIncrementLong(IN PLONG Addend)\r
+ */\r
+.global @Exfi386InterlockedIncrementLong@4\r
+@Exfi386InterlockedIncrementLong@4:\r
+\r
+ /* Do the op */\r
+ LOCK add dword ptr [ecx], 1\r
+\r
+ /* Return */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret\r
+\r
+/*INTERLOCKED_RESULT\r
+ *FASTCALL\r
+ *Exfi386InterlockedDecrementLong(IN PLONG Addend)\r
+ */\r
+.global @Exfi386InterlockedDecrementLong@4\r
+@Exfi386InterlockedDecrementLong@4:\r
+\r
+ /* Do the op */\r
+ LOCK sub dword ptr [ecx], 1\r
+\r
+ /* Return */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret\r
+\r
+/*ULONG\r
+ *FASTCALL\r
+ *Exfi386InterlockedExchangeUlong(IN PULONG Target,\r
+ * IN ULONG Value)\r
+ */\r
+.global @Exfi386InterlockedExchangeUlong@8\r
+.global @InterlockedExchange@8\r
+@InterlockedExchange@8:\r
+@Exfi386InterlockedExchangeUlong@8:\r
+\r
+#ifdef CONFIG_SMP\r
+ /* On MP, do the exchange */\r
+ xchg [ecx], edx\r
+ mov eax, edx\r
+#else\r
+ /* On UP, use cmpxchg */\r
+ mov eax, [ecx]\r
+1:\r
+ cmpxchg [ecx], edx\r
+ jnz 1b\r
+#endif\r
+\r
+ /* Return */\r
+ ret\r
+\r
+/*ULONG\r
+ *FASTCALL\r
+ *InterlockedIncrement(IN PLONG Addend)\r
+ */\r
+.global @InterlockedIncrement@4\r
+@InterlockedIncrement@4:\r
+\r
+ /* Do the op */\r
+ mov eax, 1\r
+ LOCK xadd dword ptr [ecx], eax\r
+\r
+ /* Return */\r
+ inc eax\r
+ ret\r
+\r
+/*ULONG\r
+ *FASTCALL\r
+ *InterlockedDecrement(IN PLONG Addend)\r
+ */\r
+.global @InterlockedDecrement@4\r
+@InterlockedDecrement@4:\r
+\r
+ /* Do the op */\r
+ mov eax, -1\r
+ LOCK xadd dword ptr [ecx], eax\r
+\r
+ /* Return */\r
+ dec eax\r
+ ret\r
+\r
+/*PVOID\r
+ *FASTCALL\r
+ *InterlockedCompareExchange(IN OUT PVOID *Destination,\r
+ * IN PVOID Exchange,\r
+ * IN PVOID Comperand)\r
+ */\r
+.global @InterlockedCompareExchange@12\r
+@InterlockedCompareExchange@12:\r
+\r
+ /* Get comperand */\r
+ mov eax, [esp+4]\r
+\r
+ /* Do the op */\r
+ LOCK cmpxchg dword ptr [ecx], edx\r
+\r
+ /* Return, popping the single 4-byte stack argument (FASTCALL @12) */\r
+ ret 4\r
+\r
+/*PVOID\r
+ *FASTCALL\r
+ *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,\r
+ * IN PLONGLONG Exchange,\r
+ * IN PLONGLONG Comperand)\r
+ */\r
+.global @ExfInterlockedCompareExchange64@12\r
+@ExfInterlockedCompareExchange64@12:\r
+\r
+ /* Save non-volatile registers (epilogue pops ebp, then ebx) */\r
+ push ebx\r
+ push ebp\r
+\r
+ /* Get destination pointer, exchange value and comperand value/address */\r
+ mov ebp, ecx\r
+ mov ebx, [edx]\r
+ mov ecx, [edx+4]\r
+ mov edx, [esp+12]\r
+ mov eax, [edx]\r
+ mov edx, [edx+4]\r
+\r
+ /* Do the op */\r
+ LOCK cmpxchg8b qword ptr [ebp]\r
+\r
+ /* Restore non-volatiles */\r
+ pop ebp\r
+ pop ebx\r
+\r
+ /* Return */\r
+ ret 4\r
+\r
+/*PVOID\r
+ *FASTCALL\r
+ *ExInterlockedCompareExchange64(IN PLONGLONG Destination,\r
+ * IN PLONGLONG Exchange,\r
+ * IN PLONGLONG Comperand,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global @ExInterlockedCompareExchange64@16\r
+@ExInterlockedCompareExchange64@16:\r
+\r
+ /* Save non-volatile registers (epilogue pops ebp, then ebx) */\r
+ push ebx\r
+ push ebp\r
+\r
+ /* Get destination pointer, exchange value and comperand value/address */\r
+ mov ebp, ecx\r
+ mov ebx, [edx]\r
+ mov ecx, [edx+4]\r
+ mov edx, [esp+12]\r
+ mov eax, [edx]\r
+ mov edx, [edx+4]\r
+\r
+ /* Do the op (cmpxchg8b is atomic; the Lock argument is not needed) */\r
+ LOCK cmpxchg8b qword ptr [ebp]\r
+\r
+ /* Restore non-volatiles */\r
+ pop ebp\r
+ pop ebx\r
+\r
+ /* Return */\r
+ ret 8\r
+\r
+/*PVOID\r
+ *FASTCALL\r
+ *InterlockedExchangeAdd(IN OUT PLONG Addend,\r
+ * IN LONG Increment)\r
+ */\r
+.global @InterlockedExchangeAdd@8\r
+@InterlockedExchangeAdd@8:\r
+\r
+ /* Do the op */\r
+ LOCK xadd dword ptr [ecx], edx\r
+\r
+ /* Return */\r
+ mov eax, edx\r
+ ret\r
+\r
+/* EOF */\r
-/* $Id: interlck.c 15557 2005-05-28 07:26:38Z hbirr $\r
- *\r
+/*\r
* COPYRIGHT: See COPYING in the top level directory\r
* PROJECT: ReactOS kernel\r
- * FILE: ntoskrnl/ex/i386/interlck.c\r
- * PURPOSE: No purpose listed.\r
- *\r
- * PROGRAMMERS: No programmer listed.\r
+ * FILE: ntoskrnl/ex/i386/interlck_asm.S\r
+ * PURPOSE: STDCALL Interlocked Functions\r
+ * PROGRAMMERS: Alex Ionescu (alex@relsoft.net)\r
*/\r
+ \r
+/* INCLUDES ******************************************************************/\r
+#include <ndk/asm.h>\r
+.intel_syntax noprefix\r
+\r
+//#define CONFIG_SMP\r
\r
#ifdef CONFIG_SMP\r
#define LOCK lock\r
+#define ACQUIRE_SPINLOCK(x, y) \\r
+ lock bts dword ptr [x], 0; \\r
+ jb y\r
+#define RELEASE_SPINLOCK(x) mov byte ptr [x], 0\r
+#define SPIN_ON_LOCK(x, y) \\r
+1: \\r
+ test dword ptr [x], 1; \\r
+ jz y; \\r
+ pause; \\r
+ jmp 1b\r
#else\r
#define LOCK\r
+#define ACQUIRE_SPINLOCK(x, y)\r
+#define RELEASE_SPINLOCK(x) \r
#endif\r
\r
-.global @Exfi386InterlockedIncrementLong@4\r
-@Exfi386InterlockedIncrementLong@4:\r
- LOCK\r
- addl $1,(%ecx)\r
- lahf\r
- andl $0xC000, %eax\r
- ret\r
+/* FUNCTIONS ****************************************************************/\r
\r
-.global @Exfi386InterlockedDecrementLong@4\r
-@Exfi386InterlockedDecrementLong@4:\r
- LOCK\r
- subl $1,(%ecx)\r
- lahf\r
- andl $0xC000, %eax\r
- ret\r
+/*\r
+ * NOTE: These functions must obey the following rules:\r
+ * - Acquire locks only on MP systems.\r
+ * - Be safe at HIGH_LEVEL (no paged access).\r
+ * - Preserve flags.\r
+ * - Disable interrupts.\r
+ */\r
\r
-.global @Exfi386InterlockedExchangeUlong@8\r
-@Exfi386InterlockedExchangeUlong@8:\r
- LOCK\r
- xchgl %edx,(%ecx)\r
- movl %edx,%eax\r
- ret\r
+/*PLIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedAddLargeInteger(IN PLIST_ENTRY ListHead,\r
+ * IN PLIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedAddLargeInteger@16\r
+_ExInterlockedAddLargeInteger@16:\r
+\r
+ /* Prepare stack frame */\r
+ push ebp\r
+ mov ebp, esp\r
+ sub esp, 8\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov eax, [ebp+20]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start1:\r
+ pushfd\r
+ cli\r
+\r
+ /* Acquire lock */\r
+ ACQUIRE_SPINLOCK(eax, .spin1)\r
+\r
+ /* Do the calculation */\r
+ mov eax, [ebp+8]\r
+ mov ecx, [eax]\r
+ mov edx, [eax+4]\r
+\r
+ /* Save result */\r
+ mov [ebp-8], ecx\r
+ mov [ebp-4], edx\r
+\r
+ /* Add increment */\r
+ add ecx, [ebp+12]\r
+ adc edx, [ebp+16]\r
+\r
+ /* Save result */\r
+ mov [eax], ecx\r
+ mov [eax+4], edx\r
+\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ mov eax, [ebp+20]\r
+ RELEASE_SPINLOCK(eax)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Restore frame and return values */\r
+ mov eax, [ebp-8]\r
+ mov edx, [ebp-4]\r
+ mov esp, ebp\r
+ pop ebp\r
+ ret 16\r
+\r
+#ifdef CONFIG_SMP\r
+.spin1:\r
+ popfd\r
+ SPIN_ON_LOCK(eax, .start1)\r
+#endif\r
+\r
+/*ULONG\r
+ *NTAPI\r
+ *ExInterlockedAddUlong(IN PULONG Addend,\r
+ * IN ULONG Increment,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedAddUlong@12\r
+_ExInterlockedAddUlong@12:\r
+\r
+ /* Save flags */\r
+ pushfd\r
+\r
+ /* Get lock address (EFLAGS is on the stack, so Lock is at [esp+16]) */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+16]\r
+#endif\r
+.start2:\r
+ cli\r
+\r
+ /* Acquire lock */\r
+ ACQUIRE_SPINLOCK(edx, .spin2)\r
+\r
+ /* Do the calculation */\r
+ mov ecx, [esp+8]\r
+#ifdef CONFIG_SMP\r
+ mov eax, [ecx]\r
+ add eax, [esp+12]\r
+\r
+ /* Save result */\r
+ mov [ecx], eax\r
+#else\r
+\r
+ /* Do the calculation */\r
+ mov edx, [ecx]\r
+ mov eax, edx\r
+ add edx, [esp+12]\r
+\r
+ /* Save result */\r
+ mov [ecx], edx\r
+#endif\r
+\r
+ /* Release lock, restore flags and return the original value */\r
+#ifdef CONFIG_SMP\r
+ sub eax, [esp+12]\r
+ RELEASE_SPINLOCK(edx)\r
+#endif\r
+ popfd\r
+ ret 12\r
+\r
+#ifdef CONFIG_SMP\r
+.spin2:\r
+ popfd\r
+ pushfd\r
+ SPIN_ON_LOCK(edx, .start2)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,\r
+ * IN PLIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedInsertHeadList@12\r
+_ExInterlockedInsertHeadList@12:\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+12]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start3:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin3)\r
+\r
+ /* Get list pointers */\r
+ mov eax, [esp+8]\r
+ mov ecx, [esp+12]\r
+ mov edx, [eax]\r
+\r
+ /* Do the insert */\r
+ mov [ecx], edx\r
+ mov [ecx+4], eax\r
+ mov [eax], ecx\r
+ mov [edx+4], ecx\r
+\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ mov ecx, [esp+16]\r
+ RELEASE_SPINLOCK(ecx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* check if the list was empty and return NULL */\r
+ xor eax, edx\r
+ jz 2f\r
+\r
+ /* Return pointer */\r
+ mov eax, edx\r
+\r
+2:\r
+ ret 12\r
+\r
+#ifdef CONFIG_SMP\r
+.spin3:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start3)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedInsertTailList(IN PLIST_ENTRY ListHead,\r
+ * IN PLIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedInsertTailList@12\r
+_ExInterlockedInsertTailList@12:\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+12]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start4:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin4)\r
+\r
+ /* Get list pointers */\r
+ mov eax, [esp+8]\r
+ mov ecx, [esp+12]\r
+ mov edx, [eax+4]\r
+\r
+ /* Do the insert */\r
+ mov [ecx], eax\r
+ mov [ecx+4], edx\r
+ mov [eax+4], ecx\r
+ mov [edx], ecx\r
+\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ mov ecx, [esp+16]\r
+ RELEASE_SPINLOCK(ecx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* check if the list was empty and return NULL */\r
+ xor eax, edx\r
+ jz 2f\r
+\r
+ /* Return pointer */\r
+ mov eax, edx\r
+\r
+2:\r
+ ret 12\r
+\r
+#ifdef CONFIG_SMP\r
+.spin4:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start4)\r
+#endif\r
+\r
+/*PLIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedRemoveHeadList@8\r
+_ExInterlockedRemoveHeadList@8:\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+8]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start5:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin5)\r
+\r
+ /* Get list pointers */\r
+ mov edx, [esp+8]\r
+ mov eax, [edx]\r
+\r
+ /* Check if it's empty */\r
+ cmp eax, edx\r
+ je 2f\r
+\r
+ /* Get next entry and do deletion */\r
+ mov ecx, [eax]\r
+ mov [edx], ecx\r
+ mov [ecx+4], edx\r
+\r
+ /* Release lock (Lock is at [esp+12] after pushfd; edx now holds ListHead) */\r
+#ifdef CONFIG_SMP\r
+ mov ecx, [esp+12]\r
+ RELEASE_SPINLOCK(ecx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret 8\r
\r
+2:\r
+ /* Release lock */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+12]\r
+ RELEASE_SPINLOCK(edx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return empty list */\r
+ xor eax, eax\r
+ ret 8\r
+\r
+#ifdef CONFIG_SMP\r
+.spin5:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start5)\r
+#endif\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedPopEntryList@8\r
+_ExInterlockedPopEntryList@8:\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+8]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start6:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin6)\r
+\r
+ /* Get list pointers (ListHead is at [esp+8] after pushfd) */\r
+ mov ecx, [esp+8]\r
+ mov eax, [ecx]\r
+\r
+ /* Check if it's empty */\r
+ or eax, eax\r
+ je 3f\r
+\r
+ /* Get next entry and do deletion */\r
+ mov edx, [eax]\r
+ mov [ecx], edx\r
+\r
+2:\r
+ /* Release lock (Lock is at [esp+12] after pushfd; edx was clobbered above) */\r
+#ifdef CONFIG_SMP\r
+ mov ecx, [esp+12]\r
+ RELEASE_SPINLOCK(ecx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret 8\r
+\r
+3:\r
+ /* Return empty list */\r
+ xor eax, eax\r
+ jmp 2b\r
+\r
+#ifdef CONFIG_SMP\r
+.spin6:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start6)\r
+#endif\r
+\r
+/*PSINGLE_LIST_ENTRY\r
+ *NTAPI\r
+ *ExInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,\r
+ * IN PSINGLE_LIST_ENTRY ListEntry,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedPushEntryList@12\r
+_ExInterlockedPushEntryList@12:\r
+\r
+ /* Save lock pointer */\r
+#ifdef CONFIG_SMP\r
+ mov edx, [esp+12]\r
+#endif\r
+\r
+ /* Save flags and disable interrupts */\r
+.start7:\r
+ pushfd\r
+ cli\r
+ ACQUIRE_SPINLOCK(edx, .spin7)\r
+\r
+ /* Get list pointers */\r
+ mov edx, [esp+8]\r
+ mov eax, [edx]\r
+ mov ecx, [esp+12]\r
+\r
+ /* Do push */\r
+ mov [ecx], eax\r
+ mov [edx], ecx\r
+\r
+ /* Release lock (Lock is at [esp+16] after pushfd; edx now holds ListHead) */\r
+#ifdef CONFIG_SMP\r
+ mov ecx, [esp+16]\r
+ RELEASE_SPINLOCK(ecx)\r
+#endif\r
+\r
+ /* Restore flags */\r
+ popfd\r
+\r
+ /* Return */\r
+ ret 12\r
+\r
+#ifdef CONFIG_SMP\r
+.spin7:\r
+ popfd\r
+ SPIN_ON_LOCK(edx, .start7)\r
+#endif\r
+\r
+/*INTERLOCKED_RESULT\r
+ *NTAPI\r
+ *ExInterlockedIncrementLong(IN PLONG Addend,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedIncrementLong@8\r
+_ExInterlockedIncrementLong@8:\r
+\r
+ /* Get addend */\r
+ mov eax, [esp+4]\r
+\r
+ /* Do the op */\r
+ LOCK add dword ptr [eax], 1\r
+\r
+ /* Return */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret 8\r
+\r
+/*INTERLOCKED_RESULT\r
+ *NTAPI\r
+ *ExInterlockedDecrementLong(IN PLONG Addend,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedDecrementLong@8\r
+_ExInterlockedDecrementLong@8:\r
+\r
+ /* Get addend */\r
+ mov eax, [esp+4]\r
+\r
+ /* Do the op */\r
+ LOCK sub dword ptr [eax], 1\r
+\r
+ /* Return */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret 8\r
+\r
+/*ULONG\r
+ *NTAPI\r
+ *ExInterlockedExchangeUlong(IN PULONG Target,\r
+ * IN ULONG Value,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _ExInterlockedExchangeUlong@12\r
+_ExInterlockedExchangeUlong@12:\r
+\r
+ /* Get pointers */\r
+ mov edx, [esp+4]\r
+ mov eax, [esp+8]\r
+\r
+#ifdef CONFIG_SMP\r
+ /* On MP, do the exchange */\r
+ xchg [edx], eax\r
+#else\r
+ /* On UP, disable interrupts and save flags */\r
+ pushfd\r
+ cli\r
+\r
+ /* Switch values: return the old value in eax, store the new one */\r
+ mov ecx, eax\r
+ mov eax, [edx]\r
+ mov [edx], ecx\r
+ popfd\r
+#endif\r
+\r
+ ret 12\r
+\r
+/*INTERLOCKED_RESULT\r
+ *NTAPI\r
+ *Exi386InterlockedIncrementLong(IN PLONG Addend,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
.global _Exi386InterlockedIncrementLong@4\r
_Exi386InterlockedIncrementLong@4:\r
- movl 4(%esp),%eax\r
- LOCK\r
- addl $1,(%eax)\r
- lahf\r
- andl $0xC000, %eax\r
- ret $4\r
\r
+ /* Get addend */\r
+ mov eax, [esp+4]\r
+\r
+ /* Do the op */\r
+ LOCK add dword ptr [eax], 1\r
+\r
+ /* Return */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret 4\r
+\r
+/*INTERLOCKED_RESULT\r
+ *NTAPI\r
+ *Exi386InterlockedDecrementLong(IN PLONG Addend,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
.global _Exi386InterlockedDecrementLong@4\r
_Exi386InterlockedDecrementLong@4:\r
- movl 4(%esp),%eax\r
- LOCK\r
- subl $1,(%eax)\r
- lahf\r
- andl $0xC000, %eax\r
- ret $4\r
-\r
-.global _Exi386InterlockedExchangeUlong@8\r
-_Exi386InterlockedExchangeUlong@8:\r
- movl 4(%esp),%edx\r
- movl 8(%esp),%eax\r
- LOCK\r
- xchgl %eax,(%edx)\r
- ret $8\r
\r
-.global @InterlockedIncrement@4\r
-@InterlockedIncrement@4:\r
- movl $1,%eax\r
- LOCK\r
- xaddl %eax,(%ecx)\r
- incl %eax\r
- ret\r
+ /* Get addend */\r
+ mov eax, [esp+4]\r
\r
-.global @InterlockedDecrement@4\r
-@InterlockedDecrement@4:\r
- movl $-1,%eax\r
- LOCK\r
- xaddl %eax,(%ecx)\r
- decl %eax\r
- ret\r
+ /* Do the op */\r
+ LOCK sub dword ptr [eax], 1\r
\r
-.global @InterlockedExchange@8\r
-@InterlockedExchange@8:\r
- LOCK\r
- xchgl %edx,(%ecx)\r
- movl %edx,%eax\r
- ret\r
+ /* Return (STDCALL @4: pop the single 4-byte argument) */\r
+ lahf\r
+ and eax, EFLAG_SELECT\r
+ ret 4\r
\r
-.global @InterlockedExchangeAdd@8\r
-@InterlockedExchangeAdd@8:\r
- LOCK\r
- xaddl %edx,(%ecx)\r
- movl %edx,%eax\r
- ret\r
+/*ULONG\r
+ *NTAPI\r
+ *Exi386InterlockedExchangeUlong(IN PULONG Target,\r
+ * IN ULONG Value,\r
+ * IN PKSPIN_LOCK Lock)\r
+ */\r
+.global _Exi386InterlockedExchangeUlong@12\r
+_Exi386InterlockedExchangeUlong@12:\r
\r
-.global @InterlockedClearBit@8\r
-@InterlockedClearBit@8:\r
- LOCK\r
- btr %edx,(%ecx)\r
- setc %al\r
- ret\r
+ /* Get pointers */\r
+ mov edx, [esp+4]\r
+ mov eax, [esp+8]\r
\r
-.global @InterlockedCompareExchange@12\r
-@InterlockedCompareExchange@12:\r
- movl 4(%esp),%eax\r
- LOCK\r
- cmpxchg %edx,(%ecx)\r
- ret $4\r
+#ifdef CONFIG_SMP\r
+ /* On MP, do the exchange */\r
+ xchg [edx], eax\r
+#else\r
+ /* On UP, disable interrupts and save flags */\r
+ pushfd\r
+ cli\r
+\r
+ /* Switch values: return the old value in eax, store the new one */\r
+ mov ecx, eax\r
+ mov eax, [edx]\r
+ mov [edx], ecx\r
+ popfd\r
+#endif\r
\r
+ /* Return (STDCALL @12: pop all three stack arguments) */\r
+ ret 12\r
+\r
+/* Ugly hack: Use InterlockedExchange64 */\r
+.att_syntax\r
.global @ExfpInterlockedExchange64@8\r
@ExfpInterlockedExchange64@8:\r
pushl %ebx\r
popl %esi\r
popl %ebx\r
ret\r
-\r
-.global @ExfInterlockedCompareExchange64@12\r
-@ExfInterlockedCompareExchange64@12:\r
- pushl %ebx\r
- pushl %esi\r
- movl %ecx,%esi\r
- movl (%edx),%ebx\r
- movl 4(%edx),%ecx\r
- movl 12(%esp),%edx\r
- movl (%edx),%eax\r
- movl 4(%edx),%edx\r
- LOCK\r
- cmpxchg8b (%esi)\r
- popl %esi\r
- popl %ebx\r
- ret $4\r
-\r
+ \r
/* EOF */\r
+++ /dev/null
-/* $Id$
- *
- * COPYRIGHT: See COPYING in the top level directory
- * PROJECT: ReactOS kernel
- * FILE: ntoskrnl/ex/interlck.c
- * PURPOSE: Implements interlocked functions
- *
- * PROGRAMMERS: David Welch (welch@mcmail.com)
- */
-
-/* INCLUDES *****************************************************************/
-
-#include <ntoskrnl.h>
-#define NDEBUG
-#include <internal/debug.h>
-
-/* FUNCTIONS *****************************************************************/
-
-#undef ExInterlockedDecrementLong
-
-/*
- * @implemented
- */
-INTERLOCKED_RESULT STDCALL
-ExInterlockedDecrementLong (PLONG Addend,
- PKSPIN_LOCK Lock)
-/*
- * Obsolete, use InterlockedDecrement instead
- */
-{
- KIRQL oldlvl;
- LONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Addend;
- (*Addend)--;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-#undef ExInterlockedExchangeUlong
-
-/*
- * @implemented
- */
-ULONG STDCALL
-ExInterlockedExchangeUlong (PULONG Target,
- ULONG Value,
- PKSPIN_LOCK Lock)
-/*
- * Obsolete, use InterlockedExchange instead
- */
-{
- KIRQL oldlvl;
- LONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Target;
- *Target = Value;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-
-#undef ExInterlockedAddUlong
-
-/*
- * @implemented
- */
-ULONG STDCALL
-ExInterlockedAddUlong (PULONG Addend,
- ULONG Increment,
- PKSPIN_LOCK Lock)
-/*
- * ExInterlockedAddUlong adds an unsigned long value to a given unsigned
- * integer as an atomic operation.
- *
- * ADDEND = Points to an unsigned long integer whose value is to be adjusted
- * by the Increment value.
- *
- * INCREMENT = Is an unsigned long integer to be added.
- *
- * LOCK = Points to a spinlock to be used to synchronize access to ADDEND.
- *
- * Returns:
- *
- * The original value of the unsigned integer pointed to by ADDEND.
- */
-{
- KIRQL oldlvl;
- ULONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Addend;
- *Addend += Increment;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-/*
- * @implemented
- */
-LARGE_INTEGER STDCALL
-ExInterlockedAddLargeInteger (PLARGE_INTEGER Addend,
- LARGE_INTEGER Increment,
- PKSPIN_LOCK Lock)
-/*
- * Adds two large integer values as an atomic operation.
- *
- * ADDEND = Pointer to a large integer value that will have INCREMENT added.
- *
- * INCREMENT = Value to be added.
- *
- * LOCK = Spinlock used to synchronize access to ADDEND.
- *
- * Returns:
- *
- * The original value of the large integer pointed to by ADDEND.
- */
-{
- KIRQL oldlvl;
- LARGE_INTEGER oldval;
-
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
-
- oldval.QuadPart = Addend->QuadPart;
- Addend->QuadPart += Increment.QuadPart;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-#undef ExInterlockedIncrementLong
-
-/*
- * @implemented
- */
-INTERLOCKED_RESULT STDCALL
-ExInterlockedIncrementLong (PLONG Addend,
- PKSPIN_LOCK Lock)
-/*
- * Obsolete, use InterlockedIncrement instead.
- */
-{
- KIRQL oldlvl;
- LONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Addend;
- (*Addend)++;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-/*
- * @implemented
- */
-VOID FASTCALL
-ExInterlockedAddLargeStatistic (IN PLARGE_INTEGER Addend,
- IN ULONG Increment)
-/*
- * Undocumented in DDK.
- */
-{
- Addend->QuadPart += Increment;
-}
-
-/*
- * @implemented
- */
-LONGLONG FASTCALL
-ExInterlockedCompareExchange64 (IN OUT PLONGLONG Destination,
- IN PLONGLONG Exchange,
- IN PLONGLONG Comparand,
- IN PKSPIN_LOCK Lock)
-/*
- * Undocumented in DDK.
- */
-{
- KIRQL oldlvl;
- LONGLONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Destination;
- if (*Destination == *Comparand)
- {
- *Destination = *Exchange;
- }
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-/*
- * @implemented
- */
-ULONG FASTCALL
-ExfInterlockedAddUlong(PULONG Addend,
- ULONG Increment,
- PKSPIN_LOCK Lock)
-/*
- * ExInterlockedAddUlong adds an unsigned long value to a given unsigned
- * integer as an atomic operation.
- *
- * ADDEND = Points to an unsigned long integer whose value is to be adjusted
- * by the Increment value.
- *
- * INCREMENT = Is an unsigned long integer to be added.
- *
- * LOCK = Points to a spinlock to be used to synchronize access to ADDEND.
- *
- * Returns:
- *
- * The original value of the unsigned integer pointed to by ADDEND.
- */
-{
- KIRQL oldlvl;
- ULONG oldval;
-
- KeAcquireSpinLock (Lock, &oldlvl);
-
- oldval = *Addend;
- *Addend += Increment;
-
- KeReleaseSpinLock (Lock, oldlvl);
-
- return oldval;
-}
-
-/* EOF */
+++ /dev/null
-/* $Id$
- *
- * COPYRIGHT: See COPYING in the top level directory
- * PROJECT: ReactOS kernel
- * FILE: ntoskrnl/ex/list.c
- * PURPOSE: Manages double linked lists, single linked lists and
- * sequenced lists
- *
- * PROGRAMMERS: David Welch (welch@mcmail.com)
- * Casper S. Hornstrup (chorns@users.sourceforge.net)
- */
-
-/* INCLUDES *****************************************************************/
-
-#include <ntoskrnl.h>
-#define NDEBUG
-#include <internal/debug.h>
-
-/* FUNCTIONS *************************************************************/
-
-/*
- * @implemented
- */
-PSLIST_ENTRY
-FASTCALL
-ExInterlockedFlushSList (
- IN PSLIST_HEADER ListHead
- )
-{
- PSLIST_ENTRY Old;
-
- Old = &ListHead->Next;
- ListHead->Next.Next = 0;
-
- return Old;
-}
-
-/*
- * @implemented
- */
-PLIST_ENTRY
-STDCALL
-ExInterlockedInsertHeadList(PLIST_ENTRY ListHead,
- PLIST_ENTRY ListEntry,
- PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts an entry at the head of a doubly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- PLIST_ENTRY Old;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(ListHead))
- {
- Old = NULL;
- }
- else
- {
- Old = ListHead->Flink;
- }
- InsertHeadList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
-
- return(Old);
-}
-
-
-/*
- * @implemented
- */
-PLIST_ENTRY
-STDCALL
-ExInterlockedInsertTailList(PLIST_ENTRY ListHead,
- PLIST_ENTRY ListEntry,
- PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts an entry at the tail of a doubly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- PLIST_ENTRY Old;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(ListHead))
- {
- Old = NULL;
- }
- else
- {
- Old = ListHead->Blink;
- }
- InsertTailList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
-
- return(Old);
-}
-
-
-/*
- * @implemented
- */
-PLIST_ENTRY
-STDCALL
-ExInterlockedRemoveHeadList(PLIST_ENTRY Head,
- PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes the head of a double linked list
- * ARGUMENTS:
- * Head = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PLIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(Head))
- {
- ret = NULL;
- }
- else
- {
- ret = RemoveHeadList(Head);
- }
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-PLIST_ENTRY
-STDCALL
-ExInterlockedRemoveTailList(PLIST_ENTRY Head,
- PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes the tail of a double linked list
- * ARGUMENTS:
- * Head = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PLIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(Head))
- {
- ret = NULL;
- }
- else
- {
- ret = RemoveTailList(Head);
- }
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-#undef ExInterlockedPopEntrySList
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY
-FASTCALL
-ExInterlockedPopEntrySList(IN PSLIST_HEADER ListHead,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes (pops) an entry from a sequenced list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PSINGLE_LIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret = PopEntryList(&ListHead->Next);
- if (ret)
- {
- ListHead->Depth--;
- ListHead->Sequence++;
- }
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-#undef ExInterlockedPushEntrySList
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY
-FASTCALL
-ExInterlockedPushEntrySList(IN PSLIST_HEADER ListHead,
- IN PSINGLE_LIST_ENTRY ListEntry,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts (pushes) an entry into a sequenced list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- KIRQL oldlvl;
- PSINGLE_LIST_ENTRY ret;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret=ListHead->Next.Next;
- PushEntryList(&ListHead->Next,ListEntry);
- ListHead->Depth++;
- ListHead->Sequence++;
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY
-STDCALL
-ExInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes (pops) an entry from a singly list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PSINGLE_LIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret = PopEntryList(ListHead);
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY
-STDCALL
-ExInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
- IN PSINGLE_LIST_ENTRY ListEntry,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts (pushes) an entry into a singly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- KIRQL oldlvl;
- PSINGLE_LIST_ENTRY ret;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret=ListHead->Next;
- PushEntryList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PLIST_ENTRY FASTCALL
-ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
- IN PLIST_ENTRY ListEntry,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts an entry at the head of a doubly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- PLIST_ENTRY Old;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(ListHead))
- {
- Old = NULL;
- }
- else
- {
- Old = ListHead->Flink;
- }
- InsertHeadList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
-
- return(Old);
-}
-
-
-/*
- * @implemented
- */
-PLIST_ENTRY FASTCALL
-ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
- IN PLIST_ENTRY ListEntry,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts an entry at the tail of a doubly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- PLIST_ENTRY Old;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(ListHead))
- {
- Old = NULL;
- }
- else
- {
- Old = ListHead->Blink;
- }
- InsertTailList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
-
- return(Old);
-}
-
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY FASTCALL
-ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes (pops) an entry from a singly list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PSINGLE_LIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret = PopEntryList(ListHead);
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PSINGLE_LIST_ENTRY FASTCALL
-ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
- IN PSINGLE_LIST_ENTRY ListEntry,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Inserts (pushes) an entry into a singly linked list
- * ARGUMENTS:
- * ListHead = Points to the head of the list
- * ListEntry = Points to the entry to be inserted
- * Lock = Caller supplied spinlock used to synchronize access
- * RETURNS: The previous head of the list
- */
-{
- KIRQL oldlvl;
- PSINGLE_LIST_ENTRY ret;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- ret=ListHead->Next;
- PushEntryList(ListHead,ListEntry);
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PLIST_ENTRY FASTCALL
-ExfInterlockedRemoveHeadList(IN PLIST_ENTRY Head,
- IN PKSPIN_LOCK Lock)
-/*
- * FUNCTION: Removes the head of a double linked list
- * ARGUMENTS:
- * Head = Points to the head of the list
- * Lock = Lock for synchronizing access to the list
- * RETURNS: The removed entry
- */
-{
- PLIST_ENTRY ret;
- KIRQL oldlvl;
-
- KeAcquireSpinLock(Lock,&oldlvl);
- if (IsListEmpty(Head))
- {
- ret = NULL;
- }
- else
- {
- ret = RemoveHeadList(Head);
- }
- KeReleaseSpinLock(Lock,oldlvl);
- return(ret);
-}
-
-
-/*
- * @implemented
- */
-PSLIST_ENTRY
-FASTCALL
-InterlockedPopEntrySList(IN PSLIST_HEADER ListHead)
-{
- SLIST_HEADER newslh, oldslh;
- PSLIST_ENTRY le;
-
- do
- {
- oldslh = *(volatile SLIST_HEADER *)ListHead;
- le = oldslh.Next.Next;
- if(le == NULL)
- {
- /* nothing to do */
- return NULL;
- }
- newslh.Sequence = oldslh.Sequence + 1;
- newslh.Depth = oldslh.Depth - 1;
- newslh.Next.Next = MmSafeReadPtr(&le->Next);
- } while(ExfInterlockedCompareExchange64((PLONGLONG)&ListHead->Alignment,
- (PLONGLONG)&newslh.Alignment,
- (PLONGLONG)&oldslh.Alignment) != (LONGLONG)oldslh.Alignment);
-
- return le;
-}
-
-
-/*
- * @implemented
- */
-PSLIST_ENTRY
-FASTCALL
-InterlockedPushEntrySList(IN PSLIST_HEADER ListHead,
- IN PSLIST_ENTRY ListEntry)
-{
- SLIST_HEADER newslh, oldslh;
-
- newslh.Next.Next = ListEntry;
-
- do
- {
- oldslh = *(volatile SLIST_HEADER *)ListHead;
- newslh.Depth = oldslh.Depth + 1;
- newslh.Sequence = oldslh.Sequence + 1;
- ListEntry->Next = oldslh.Next.Next;
- } while(ExfInterlockedCompareExchange64((PLONGLONG)&ListHead->Alignment,
- (PLONGLONG)&newslh.Alignment,
- (PLONGLONG)&oldslh.Alignment) != (LONGLONG)oldslh.Alignment);
-
- return oldslh.Next.Next;
-}
-
-/* EOF */
+++ /dev/null
-/* $Id$
- *
- * COPYRIGHT: See COPYING in the top level directory
- * PROJECT: ReactOS kernel
- * FILE: ntoskrnl/ex/synch.c
- * PURPOSE: Synchronization Functions (Pushlocks)
- *
- * PROGRAMMERS: No programmer listed.
- */
-
-/* INCLUDES *****************************************************************/
-
-#include <ntoskrnl.h>
-#define NDEBUG
-#include <internal/debug.h>
-
-/* FUNCTIONS *****************************************************************/
-
-/*
- * @unimplemented
- */
-PVOID
-FASTCALL
-ExfAcquirePushLockExclusive (
- PVOID Lock
- )
-{
- UNIMPLEMENTED;
- return NULL;
-}
-
-/*
- * @unimplemented
- */
-PVOID
-FASTCALL
-ExfAcquirePushLockShared (
- PVOID Lock
- )
-{
- UNIMPLEMENTED;
- return NULL;
-}
-
-/*
- * @unimplemented
- */
-PVOID
-FASTCALL
-ExfReleasePushLock (
- PVOID Lock
- )
-{
- UNIMPLEMENTED;
- return NULL;
-}
-
-/* EOF */
KeAcquireGuardedMutexUnsafe(PKGUARDED_MUTEX GuardedMutex)
{
/* Remove the lock */
- if (!InterlockedClearBit(&GuardedMutex->Count, 0))
+ if (!InterlockedBitTestAndReset(&GuardedMutex->Count, 0))
{
/* The Guarded Mutex was already locked, enter contented case */
KiAcquireGuardedMutexContented(GuardedMutex);
KeEnterGuardedRegion();
/* Remove the lock */
- if (InterlockedClearBit(&GuardedMutex->Count, 0))
+ if (InterlockedBitTestAndReset(&GuardedMutex->Count, 0))
{
/* Re-enable APCs */
KeLeaveGuardedRegion();
@Exfi386InterlockedExchangeUlong@8
@Exfi386InterlockedIncrementLong@4
Exi386InterlockedDecrementLong@4
-Exi386InterlockedExchangeUlong@8
+Exi386InterlockedExchangeUlong@12
Exi386InterlockedIncrementLong@4
FsRtlAcquireFileExclusive@4
FsRtlAddLargeMcbEntry@28
<if property="ARCH" value="i386">
<directory name="i386">
<file>interlck_asm.S</file>
+ <file>fastinterlck_asm.S</file>
</directory>
</if>
<compilationunit name="ex.c">
<file>fmutex.c</file>
<file>handle.c</file>
<file>init.c</file>
- <file>interlck.c</file>
- <file>list.c</file>
<file>lookas.c</file>
<file>mutant.c</file>
<file>power.c</file>