[ros-diffs] [ion] 20474: - Rewrite the Executive Interlocked Functions to actually work as they are supposed to and also immensely increase their speed, most notably on non-SMP machines:

ion@svn.reactos.org
Sat Dec 31 00:53:35 CET 2005


- Rewrite the Executive Interlocked Functions to actually work as they are
supposed to and also immensely increase their speed, most notably on
non-SMP machines (C-level sketches of the new pattern follow this list):
  * The functions now disable interrupts, as they should. This is core
functionality of the ExInterlocked functions which we previously did not
implement.
  * The functions now preserve EFLAGS, as they should. This is another
core piece of functionality, and a major point of the ExInterlocked
functions, which we previously did not implement.
  * Use KiAcquire/ReleaseSpinlock equivalents instead of going through the
HAL. Also make them macros so they can later be reused in other assembly
files.
  * Write optimized versions for non-SMP machines. Namely, the spinlock
macros are coded to be no-ops on UP machines, since they are not needed.
  * Write all functions in assembly, so we can cleanly optimize them and
properly save/restore EFLAGS and use CLI.
- Use the right interlocked macro in guarded mutex implementation.
- Fix the export of Exi386InterlockedExchangeUlong.
- Use documented EFLAGS definitions instead of hard-coded values in some
interlocked functions.
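
For readers skimming the assembly below, here is a minimal C-level sketch of
the pattern every rewritten ExInterlocked routine now follows. It is only an
illustration: the real implementations are the assembly functions in
fastinterlck_asm.S and interlck_asm.S, and the names used here
(SKETCH_SPIN_LOCK, save_flags_and_disable_interrupts, restore_flags,
acquire_lock, release_lock, ExInterlockedAddUlong_sketch) are made-up
stand-ins for the PUSHFD/CLI/POPFD sequences and the spinlock macros, not
real kernel APIs.

#include <stdint.h>

typedef volatile long SKETCH_SPIN_LOCK;       /* stand-in for KSPIN_LOCK */

/* Stand-ins for the PUSHFD + CLI and POPFD sequences in the assembly. */
static uintptr_t save_flags_and_disable_interrupts(void) { return 0; }
static void restore_flags(uintptr_t flags) { (void)flags; }

static void acquire_lock(SKETCH_SPIN_LOCK *lock)
{
#ifdef CONFIG_SMP
    /* ~ ACQUIRE_SPINLOCK: lock bts, then spin (with pause) until free */
    while (__sync_lock_test_and_set(lock, 1))
        ;
#else
    (void)lock;     /* the spinlock macros expand to nothing on UP builds */
#endif
}

static void release_lock(SKETCH_SPIN_LOCK *lock)
{
#ifdef CONFIG_SMP
    __sync_lock_release(lock);  /* ~ RELEASE_SPINLOCK: mov byte ptr [lock], 0 */
#else
    (void)lock;
#endif
}

/* Sketch of ExInterlockedAddUlong: add Increment, return the previous value. */
unsigned long ExInterlockedAddUlong_sketch(unsigned long *Addend,
                                           unsigned long Increment,
                                           SKETCH_SPIN_LOCK *Lock)
{
    uintptr_t flags = save_flags_and_disable_interrupts();  /* pushfd + cli */
    unsigned long old;

    acquire_lock(Lock);                       /* MP only */
    old = *Addend;
    *Addend += Increment;
    release_lock(Lock);                       /* MP only */

    restore_flags(flags);   /* popfd: EFLAGS, including IF, come back as saved */
    return old;
}

On a UP build the lock steps compile away and the routine reduces to "pushfd,
cli, do the operation, popfd", which is where most of the speed win on non-SMP
machines comes from.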
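The EFLAGS bullet refers to the EFLAG_SIGN/EFLAG_ZERO/EFLAG_SELECT constants
added to ndk/asm.h in this revision: after LAHF the sign and zero flags sit in
bits 15 and 14 of EAX, so EFLAG_SELECT is exactly the 0xC000 mask the old code
hard-coded. A plain-C sketch of that encoding (describe_result is just an
illustrative helper, not kernel code):

#include <stdio.h>

#define EFLAG_SIGN   0x8000                     /* SF, bit 15 of AX after LAHF */
#define EFLAG_ZERO   0x4000                     /* ZF, bit 14 of AX after LAHF */
#define EFLAG_SELECT (EFLAG_SIGN | EFLAG_ZERO)  /* == the old hard-coded 0xC000 */

/* Decode the masked-flags value returned by Exfi386InterlockedIncrementLong
 * and Exfi386InterlockedDecrementLong after their lahf/and sequence. */
static const char *describe_result(unsigned int masked_flags)
{
    if (masked_flags & EFLAG_ZERO)
        return "result is zero";
    if (masked_flags & EFLAG_SIGN)
        return "result is negative";
    return "result is positive";
}

int main(void)
{
    /* An increment that lands exactly on zero returns EFLAG_ZERO. */
    printf("%s\n", describe_result(EFLAG_ZERO));
    return 0;
}
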
Modified: trunk/reactos/include/ndk/asm.h
Added: trunk/reactos/ntoskrnl/ex/i386/fastinterlck_asm.S
Modified: trunk/reactos/ntoskrnl/ex/i386/interlck_asm.S
Deleted: trunk/reactos/ntoskrnl/ex/interlck.c
Deleted: trunk/reactos/ntoskrnl/ex/list.c
Deleted: trunk/reactos/ntoskrnl/ex/synch.c
Modified: trunk/reactos/ntoskrnl/ke/gmutex.c
Modified: trunk/reactos/ntoskrnl/ntoskrnl.def
Modified: trunk/reactos/ntoskrnl/ntoskrnl.xml
  _____  

Modified: trunk/reactos/include/ndk/asm.h
--- trunk/reactos/include/ndk/asm.h	2005-12-30 23:50:05 UTC (rev 20473)
+++ trunk/reactos/include/ndk/asm.h	2005-12-30 23:53:13 UTC (rev 20474)
@@ -289,6 +289,9 @@

 #define EFLAGS_ALIGN_CHECK                      0x40000
 #define EFLAGS_VIF                              0x80000
 #define EFLAGS_VIP                              0x100000
+#define EFLAG_SIGN                              0x8000
+#define EFLAG_ZERO                              0x4000
+#define EFLAG_SELECT                            (EFLAG_SIGN | EFLAG_ZERO)
 
 //
 // CR0
  _____  

Added: trunk/reactos/ntoskrnl/ex/i386/fastinterlck_asm.S
--- trunk/reactos/ntoskrnl/ex/i386/fastinterlck_asm.S	2005-12-30 23:50:05 UTC (rev 20473)
+++ trunk/reactos/ntoskrnl/ex/i386/fastinterlck_asm.S	2005-12-30 23:53:13 UTC (rev 20474)
@@ -0,0 +1,701 @@

+/*
+ * COPYRIGHT:       See COPYING in the top level directory
+ * PROJECT:         ReactOS kernel
+ * FILE:            ntoskrnl/ex/i386/fastinterlck_asm.S
+ * PURPOSE:         FASTCALL Interlocked Functions
+ * PROGRAMMERS:     Alex Ionescu (alex@relsoft.net)
+ */
+ 
+/* INCLUDES ******************************************************************/
+#include <ndk/asm.h>
+.intel_syntax noprefix
+
+//#define CONFIG_SMP
+
+#ifdef CONFIG_SMP
+#define LOCK lock
+#define ACQUIRE_SPINLOCK(x, y) \
+    lock bts dword ptr [x], 0; \
+    jb y
+#define RELEASE_SPINLOCK(x) mov byte ptr [x], 0
+#define SPIN_ON_LOCK(x, y) \
+1: \
+    test dword ptr [x], 1; \
+    jz y; \
+    pause; \
+    jmp 1b
+#else
+#define LOCK
+#define ACQUIRE_SPINLOCK(x, y)
+#define RELEASE_SPINLOCK(x) 
+#endif
+
+/* FUNCTIONS ****************************************************************/
+
+/*
+ * NOTE: These functions must obey the following rules:
+ *  - Acquire locks only on MP systems.
+ *  - Be safe at HIGH_LEVEL (no paged access).
+ *  - Preserve flags.
+ *  - Disable interrupts.
+ */
+
+/*VOID
+ *FASTCALL
+ *ExInterlockedAddLargeStatistic(IN PLARGE_INTEGER Addend,
+ *                               IN ULONG Increment)
+ */
+.global @ExInterlockedAddLargeStatistic@8
+@ExInterlockedAddLargeStatistic@8:
+
+#ifdef CONFIG_SMP
+    /* Do the addition */
+    lock add [ecx], edx
+
+    /* Check for carry bit and return */
+    jb 1f
+    ret
+
+1:
+    /* Add carry */
+    lock adc dword ptr [ecx+4], 0
+#else
+    /* Do the addition and add the carry */
+    add dword ptr [ecx], edx
+    adc dword ptr [ecx+4], 0
+#endif
+    /* Return */
+    ret
+
+/*ULONG
+ *FASTCALL
+ *ExfInterlockedAddUlong(IN PULONG Addend,
+ *                       IN ULONG Increment,
+ *                       IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedAddUlong@12
+@ExfInterlockedAddUlong@12:
+
+    /* Save flags */
+    pushfd
+
+#ifdef CONFIG_SMP
+    /* Get lock address */
+    mov eax, [esp+8]
+.start1:
+#endif
+    /* Disable interrupts */
+    cli
+
+    /* Acquire lock */
+    ACQUIRE_SPINLOCK(eax, .spin1)
+
+    /* Do the add */
+    mov eax, [ecx]
+    add [ecx], edx
+
+#ifdef CONFIG_SMP
+    /* Get spinlock address and release it */
+    mov edx, [esp+8]
+    RELEASE_SPINLOCK(edx)
+#endif
+
+    /* Restore flags and return */
+    popfd
+    ret 4
+
+#ifdef CONFIG_SMP
+.spin1:
+    /* Restore flags and spin */
+    popfd
+    pushfd
+    SPIN_ON_LOCK(eax, .start1)
+#endif
+
+/*PLIST_ENTRY
+ *FASTCALL
+ *ExfInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
+ *                             IN PLIST_ENTRY ListEntry,
+ *                             IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedInsertHeadList@12
+@ExfInterlockedInsertHeadList@12:
+
+#ifdef CONFIG_SMP
+    /* Save lock address */
+    push esi
+    mov esi, [esp+8]
+#endif
+
+    /* Save flags and disable interrupts */
+    pushfd
+.start2:
+    cli
+
+    /* Acquire lock */
+    ACQUIRE_SPINLOCK(esi, .spin2)
+
+    /* Get list pointer */
+    mov eax, [ecx]
+
+    /* Do the insert */
+    mov [edx], eax
+    mov [edx+4], ecx
+    mov [ecx], edx
+    mov [eax+4], edx
+
+    /* Release lock and restore flags */
+    RELEASE_SPINLOCK(esi)
+    popfd
+
+#ifdef CONFIG_SMP
+    pop esi
+#endif
+
+    /* Check if list was empty */
+    xor eax, ecx
+    jz 2f
+
+    /* Return list pointer */
+    xor eax, ecx
+2:
+    ret 4
+
+#ifdef CONFIG_SMP
+.spin2:
+    popfd
+    pushfd
+    SPIN_ON_LOCK(esi, .start2)
+#endif
+
+/*PLIST_ENTRY
+ *NTAPI
+ *ExfInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
+ *                            IN PLIST_ENTRY ListEntry,
+ *                            IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedInsertTailList@12
+@ExfInterlockedInsertTailList@12:
+
+#ifdef CONFIG_SMP
+    /* Save lock address */
+    push esi
+    mov esi, [esp+8]
+#endif
+
+    /* Save flags and disable interrupts */
+    pushfd
+.start3:
+    cli
+
+    /* Acquire lock */
+    ACQUIRE_SPINLOCK(esi, .spin3)
+
+    /* Get list pointer */
+    mov eax, [ecx+4]
+
+    /* Do the insert */
+    mov [edx], ecx
+    mov [edx+4], eax
+    mov [ecx+4], edx
+    mov [eax], edx
+
+    /* Release lock and restore flags */
+    RELEASE_SPINLOCK(esi)
+    popfd
+
+#ifdef CONFIG_SMP
+    pop esi
+#endif
+
+    /* Check if list was empty */
+    xor eax, ecx
+    jz 2f
+
+    /* Return list pointer */
+    xor eax, ecx
+2:
+    ret 4
+
+#ifdef CONFIG_SMP
+.spin3:
+    popfd
+    pushfd
+    SPIN_ON_LOCK(esi, .start3)
+#endif
+
+/*PLIST_ENTRY
+ *FASTCALL
+ *ExfInterlockedRemoveHeadList(IN PLIST_ENTRY ListHead,
+ *                             IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedRemoveHeadList@8
+@ExfInterlockedRemoveHeadList@8:
+
+    /* Save flags and disable interrupts */
+.start4:
+    pushfd
+    cli
+    ACQUIRE_SPINLOCK(edx, .spin4)
+
+    /* Get list pointer */
+    mov eax, [ecx]
+
+    /* Check if it's empty */
+    cmp eax, ecx
+    je 2f
+
+    /* Get the next entry and do the deletion */
+#ifdef CONFIG_SMP
+    push ebx
+    mov ebx, [eax]
+    mov [ecx], ebx
+    mov [ebx+4], ecx
+#else
+    mov edx, [eax]
+    mov [ecx], edx
+    mov [edx+4], ecx
+#endif
+
+    /* Release lock */
+#ifdef CONFIG_SMP
+    RELEASE_SPINLOCK(edx)
+    pop ebx
+#endif
+
+    /* Restore flags */
+    popfd
+
+    /* Return */
+    ret
+
+2:
+    /* Release lock */
+    RELEASE_SPINLOCK(edx)
+
+    /* Restore flags */
+    popfd
+
+    /* Return empty list */
+    xor eax, eax
+    ret
+
+#ifdef CONFIG_SMP
+.spin4:
+    popfd
+    SPIN_ON_LOCK(edx, .start4)
+#endif
+
+/*PSINGLE_LIST_ENTRY
+ *FASTCALL
+ *ExfInterlockedPopEntryList(IN PSINGLE_LIST_ENTRY ListHead,
+ *                          IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedPopEntryList@8
+@ExfInterlockedPopEntryList@8:
+
+    /* Save flags and disable interrupts */
+.start5:
+    pushfd
+    cli
+    ACQUIRE_SPINLOCK(edx, .spin5)
+
+    /* Get list pointer */
+    mov eax, [ecx]
+
+    /* Check if it's empty */
+    or eax, eax
+    je 3f
+
+    /* Get next entry and do deletion */
+#ifdef CONFIG_SMP
+    push edx
+#endif
+    mov edx, [eax]
+    mov [ecx], edx
+#ifdef CONFIG_SMP
+    pop edx
+#endif
+
+2:
+    /* Release lock */
+    RELEASE_SPINLOCK(edx)
+
+    /* Restore flags */
+    popfd
+
+    /* Return */
+    ret
+
+3:
+    /* Return empty list */
+    xor eax, eax
+    jmp 2b
+
+#ifdef CONFIG_SMP
+.spin5:
+    popfd
+    SPIN_ON_LOCK(edx, .start5)
+#endif
+
+/*PSINGLE_LIST_ENTRY
+ *NTAPI
+ *ExfInterlockedPushEntryList(IN PSINGLE_LIST_ENTRY ListHead,
+ *                            IN PSINGLE_LIST_ENTRY ListEntry,
+ *                            IN PKSPIN_LOCK Lock)
+ */
+.global @ExfInterlockedPushEntryList@12
+@ExfInterlockedPushEntryList@12:
+
+    /* Save flags */
+    pushfd
+
+    /* Save lock pointer */
+#ifdef CONFIG_SMP
+    push edx
+    mov edx, [esp+12]
+#endif
+
+    /* Disable interrupts */
+.start6:
+    cli
+#ifdef CONFIG_SMP
+    ACQUIRE_SPINLOCK(edx, .spin6)
+    pop edx
+#endif
+
+    /* Get list pointer */
+    mov eax, [ecx]
+
+    /* Do push */
+    mov [edx], eax
+    mov [ecx], edx
+
+    /* Release lock */
+#ifdef CONFIG_SMP
+    mov edx, [esp+8]
+    RELEASE_SPINLOCK(edx)
+#endif
+
+    /* Restore flags */
+    popfd
+
+    /* Return */
+    ret 4
+
+#ifdef CONFIG_SMP
+.spin6:
+    pop edx
+    popfd
+    pushfd
+    push edx
+    mov edx, [esp+12]
+    SPIN_ON_LOCK(edx, .start6)
+#endif
+
+/*PSINGLE_LIST_ENTRY
+ *NTAPI
+ *ExInterlockedPopEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
+ *                           IN PKSPIN_LOCK Lock)
+ */
+.global @ExInterlockedPopEntrySList@8
+.global @InterlockedPopEntrySList@4
+@ExInterlockedPopEntrySList@8:
+@InterlockedPopEntrySList@4:
+
+    /* Save registers */
+    push ebx
+    push ebp
+
+    /* Pointer to list */
+    mov ebp, ecx
+
+    /* Get sequence number and link pointer */
+    mov edx, [ebp+4]
+    mov eax, [ebp]
+
+1:
+    /* Check if the list is empty */
+    or eax, eax
+    jz 2f
+
+    /* Copy sequence number and adjust it */
+    lea ecx, [edx-1]
+
+    /* Get next pointer and do the exchange */
+    mov ebx, [eax]
+    LOCK cmpxchg8b qword ptr [ebp]
+    jnz 1b
+
+    /* Restore registers and return */
+2:
+    pop ebp
+    pop ebx
+    ret
+
+/*PSINGLE_LIST_ENTRY
+ *NTAPI
+ *ExInterlockedPushEntrySList(IN PSINGLE_LIST_ENTRY ListHead,
+ *                            IN PSINGLE_LIST_ENTRY ListEntry,
+ *                            IN PKSPIN_LOCK Lock)
+ */
+.global @ExInterlockedPushEntrySList@12
+@ExInterlockedPushEntrySList@12:
+
+    /* Move the return address over the Lock argument so we can fall through below */
+    pop [esp]
+
+.global @InterlockedPushEntrySList@8
+@InterlockedPushEntrySList@8:
+
+    /* Save registers */
+    push ebx
+    push ebp
+
+    /* Pointer to list */
+    mov ebp, ecx
+    mov ebx, edx
+
+    /* Get sequence number and link pointer */
+    mov edx, [ebp+4]
+    mov eax, [ebp]
+
+1:
+    /* Set link pointer */
+    mov [ebx], eax
+
+    /* Copy sequence number and adjust it */
+    lea ecx, [edx+0x10001]
+
+    /* Do the exchange */
+    LOCK cmpxchg8b qword ptr [ebp]
+    jnz 1b
+
+    /* Restore registers and return */
+2:
+    pop ebp
+    pop ebx
+    ret
+
+/*PSINGLE_LIST_ENTRY
+ *NTAPI
+ *ExInterlockedFlushSList(IN PSINGLE_LIST_ENTRY ListHead)
+ */
+.global @ExInterlockedFlushSList@4
+@ExInterlockedFlushSList@4:
+
+    /* Save registers */
+    push ebx
+    push ebp
+
+    /* Clear ebx */
+    xor ebx, ebx
+
+    /* Pointer to list */
+    mov ebp, ecx
+
+    /* Get sequence number and link pointer */
+    mov edx, [ebp+4]
+    mov eax, [ebp]
+
+1:
+    /* Check if the list is empty */
+    or eax, eax
+    jz 2f
+
+    /* Clear sequence and pointer */
+    mov ecx, edx
+    mov cx, bx
+
+    /* Do the exchange */
+    LOCK cmpxchg8b qword ptr [ebp]
+    jnz 1b
+
+    /* Restore registers and return */
+2:
+    pop ebp
+    pop ebx
+    ret
+
+/*INTERLOCKED_RESULT
+ *FASTCALL
+ *Exfi386InterlockedIncrementLong(IN PLONG Addend)
+ */
+.global @Exfi386InterlockedIncrementLong@4
+@Exfi386InterlockedIncrementLong@4:
+
+    /* Do the op */
+    LOCK add dword ptr [ecx], 1
+
+    /* Return */
+    lahf
+    and eax, EFLAG_SELECT
+    ret
+
+/*INTERLOCKED_RESULT
+ *FASTCALL
+ *Exfi386InterlockedDecrementLong(IN PLONG Addend)
+ */
+.global @Exfi386InterlockedDecrementLong@4
+@Exfi386InterlockedDecrementLong@4:
+
+    /* Do the op */
+    LOCK sub dword ptr [ecx], 1
+
+    /* Return */
+    lahf
+    and eax, EFLAG_SELECT
+    ret
+
+/*ULONG
+ *FASTCALL
+ *Exfi386InterlockedExchangeUlong(IN PULONG Target,
+ *                                IN ULONG Value)
+ */
+.global @Exfi386InterlockedExchangeUlong@8
+.global @InterlockedExchange@8
+@InterlockedExchange@8:
+@Exfi386InterlockedExchangeUlong@8:
+
+#ifdef CONFIG_SMP
+    /* On MP, do the exchange */
+    xchg [ecx], edx
+    mov eax, edx
+#else
+    /* On UP, use cmpxchg */
+    mov eax, [ecx]
+1:
+    cmpxchg [ecx], edx
+    jnz 1b
+#endif
+
+    /* Return */
+    ret
+
+/*ULONG
+ *FASTCALL
+ *InterlockedIncrement(IN PLONG Addend)
+ */
+.global @InterlockedIncrement@4
+@InterlockedIncrement@4:
+
+    /* Do the op */
+    mov eax, 1
+    LOCK xadd dword ptr [ecx], eax
+
+    /* Return */
+    inc eax
+    ret
+
+/*ULONG
+ *FASTCALL
+ *InterlockedDecrement(IN PLONG Addend)
+ */
+.global @InterlockedDecrement@4
+@InterlockedDecrement@4:
+
+    /* Do the op */
+    mov eax, -1
+    LOCK xadd dword ptr [ecx], eax
+
+    /* Return */
+    dec eax
+    ret
+
+/*PVOID
+ *FASTCALL
+ *InterlockedCompareExchange(IN OUT PVOID *Destination,
+ *                           IN PVOID Exchange,
+ *                           IN PVOID Comperand)
+ */
+.global @InterlockedCompareExchange@12
+@InterlockedCompareExchange@12:
+
+    /* Get comperand */
+    mov eax, [esp+4]
+
+    /* Do the op */
+    LOCK cmpxchg dword ptr [ecx], edx
+
+    /* Return */
+    ret 4
+
+/*PVOID
+ *FASTCALL
+ *ExfInterlockedCompareExchange64(IN PLONGLONG Destination,
+ *                                IN PLONGLONG Exchange,
+ *                                IN PLONGLONG Comperand)
+ */
+.global @ExfInterlockedCompareExchange64@12
+@ExfInterlockedCompareExchange64@12:
+
+    /* Save registers */
+    push ebx
+    push ebp
+
+    /* Get the destination pointer, exchange value, and comparand value/address */
+    mov ebp, ecx
+    mov ebx, [edx]
+    mov ecx, [edx+4]
+    mov edx, [esp+12]
+    mov eax, [edx]
+    mov edx, [edx+4]
+
+    /* Do the op */
+    LOCK cmpxchg8b qword ptr [ebp]
+
+    /* Restore volatiles */
+    pop ebp
+    pop ebx
+
+    /* Return */
+    ret 4
+
+/*PVOID
+ *FASTCALL
+ *ExInterlockedCompareExchange64(IN PLONGLONG Destination,
+ *                               IN PLONGLONG Exchange,
+ *                               IN PLONGLONG Comperand,
+ *                               IN PKSPIN_LOCK Lock)
+ */
+.global @ExInterlockedCompareExchange64@16
+@ExInterlockedCompareExchange64@16:
+
+    /* Save registers */
+    push ebx
+    push ebp
+
+    /* Get the destination pointer, exchange value, and comparand value/address */
+    mov ebp, ecx
+    mov ebx, [edx]
+    mov ecx, [edx+4]
+    mov edx, [esp+12]
+    mov eax, [edx]
+    mov edx, [edx+4]
+
+    /* Do the op */
+    LOCK cmpxchg8b qword ptr [ebp]
+
+    /* Restore volatiles */
+    pop ebp
+    pop ebx
+
+    /* Return */
+    ret 8
+
+/*PVOID
+ *FASTCALL
+ *InterlockedExchangeAdd(IN OUT PLONG Addend,
+ *                       IN LONG Increment)
+ */
+.global @InterlockedExchangeAdd@8
+@InterlockedExchangeAdd@8:
+
+    /* Do the op */
+    LOCK xadd dword ptr [ecx], edx
+
+    /* Return */
+    mov eax, edx
+    ret
+
+/* EOF */
  _____  

Modified: trunk/reactos/ntoskrnl/ex/i386/interlck_asm.S
--- trunk/reactos/ntoskrnl/ex/i386/interlck_asm.S	2005-12-30 23:50:05 UTC (rev 20473)
+++ trunk/reactos/ntoskrnl/ex/i386/interlck_asm.S	2005-12-30 23:53:13 UTC (rev 20474)
@@ -1,112 +1,583 @@

-/* $Id: interlck.c 15557 2005-05-28 07:26:38Z hbirr $
- *
+/*
  * COPYRIGHT:       See COPYING in the top level directory
  * PROJECT:         ReactOS kernel
- * FILE:            ntoskrnl/ex/i386/interlck.c
- * PURPOSE:         No purpose listed.
- *
- * PROGRAMMERS:     No programmer listed.
+ * FILE:            ntoskrnl/ex/i386/interlck_asm.S
+ * PURPOSE:         STDCALL Interlocked Functions
+ * PROGRAMMERS:     Alex Ionescu (alex@relsoft.net)
  */
+ 
+/* INCLUDES ******************************************************************/
+#include <ndk/asm.h>
+.intel_syntax noprefix
 
+//#define CONFIG_SMP
+
 #ifdef CONFIG_SMP
 #define LOCK lock
+#define ACQUIRE_SPINLOCK(x, y) \
+    lock bts dword ptr [x], 0; \
+    jb y
+#define RELEASE_SPINLOCK(x) mov byte ptr [x], 0
+#define SPIN_ON_LOCK(x, y) \
+1: \
+    test dword ptr [x], 1; \
+    jz y; \
+    pause; \
+    jmp 1b
 #else
 #define LOCK
+#define ACQUIRE_SPINLOCK(x, y)
+#define RELEASE_SPINLOCK(x) 
 #endif
 
-.global @Exfi386InterlockedIncrementLong@4
-@Exfi386InterlockedIncrementLong@4:
-	LOCK
-	addl $1,(%ecx)
-	lahf
-	andl $0xC000, %eax
-	ret
+/* FUNCTIONS ****************************************************************/
 
-.global @Exfi386InterlockedDecrementLong@4
-@Exfi386InterlockedDecrementLong@4:
-	LOCK
-	subl $1,(%ecx)
-	lahf
-	andl $0xC000, %eax
-	ret
+/*
+ * NOTE: These functions must obey the following rules:
+ *  - Acquire locks only on MP systems.
+ *  - Be safe at HIGH_LEVEL (no paged access).
+ *  - Preserve flags.
+ *  - Disable interrupts.
+ */
 
-.global @Exfi386InterlockedExchangeUlong@8
-@Exfi386InterlockedExchangeUlong@8:
-	LOCK
-	xchgl %edx,(%ecx)
-	movl  %edx,%eax
-	ret
+/*LARGE_INTEGER
+ *NTAPI
+ *ExInterlockedAddLargeInteger(IN PLARGE_INTEGER Addend,
+ *                             IN LARGE_INTEGER Increment,
+ *                             IN PKSPIN_LOCK Lock)
+ */
+.global _ExInterlockedAddLargeInteger@16
+_ExInterlockedAddLargeInteger@16:
 
+    /* Prepare stack frame */
+    push ebp
+    mov ebp, esp
+    sub esp, 8
+
+    /* Save lock pointer */
+#ifdef CONFIG_SMP
+    mov eax, [ebp+20]
+#endif
+
+    /* Save flags and disable interrupts */
+.start1:
+    pushfd
+    cli
+
+    /* Acquire lock */
+    ACQUIRE_SPINLOCK(eax, .spin1)
+
+    /* Do the calculation */
+    mov eax, [ebp+8]
+    mov ecx, [eax]
+    mov edx, [eax+4]
+
+    /* Save result */
+    mov [ebp-8], ecx
+    mov [ebp-4], edx
+
+    /* Add increment */
+    add ecx, [ebp+12]
+    adc edx, [ebp+16]
+
+    /* Save result */
+    mov [eax], ecx
+    mov [eax+4], edx
+
+    /* Release lock */
+#ifdef CONFIG_SMP
+    mov eax, [ebp+20]
+    RELEASE_SPINLOCK(eax)
+#endif
+
+    /* Restore flags */
+    popfd
+
+    /* Restore frame and return values */
+    mov eax, [ebp-8]
+    mov edx, [ebp-4]
+    mov esp, ebp
+    pop ebp
+    ret 16
+
+#ifdef CONFIG_SMP
+.spin1:
+    popfd
+    SPIN_ON_LOCK(eax, .start1)
+#endif
+
+/*ULONG
+ *NTAPI
+ *ExInterlockedAddUlong(IN PULONG Addend,
+ *                      IN ULONG Increment,
+ *                      IN PKSPIN_LOCK Lock)
+ */
+.global _ExInterlockedAddUlong@12
+_ExInterlockedAddUlong@12:
+
+    /* Save flags and disable interrupts */
+    pushfd
+
+    /* Get lock address */
+#ifdef CONFIG_SMP
+    mov edx, [esp+16]
+#endif
+.start2:
+    cli
+
+    /* Acquire lock */
+    ACQUIRE_SPINLOCK(edx, .spin2)
+
+    /* Do the calculation */
+    mov ecx, [esp+8]
+#ifdef CONFIG_SMP
+    mov eax, [ecx]
+    add eax, [esp+12]
+
+    /* Save result */
+    mov [ecx], eax
+#else
+
+    /* Do the calculation */
+    mov edx, [ecx]
+    mov eax, edx
+    add edx, [esp+12]
+
+    /* Save result */
+    mov [ecx], edx
+#endif
+
+    /* Release lock, restore flags and return */
+#ifdef CONFIG_SMP
+    sub eax, [esp+12]
+    RELEASE_SPINLOCK(edx)
+#endif
+    popfd
+    ret 12
+
+#ifdef CONFIG_SMP
+.spin2:
+    popfd
+    pushfd
+    SPIN_ON_LOCK(edx, .start2)
+#endif
+
+/*PLIST_ENTRY
+ *NTAPI
+ *ExInterlockedInsertHeadList(IN PLIST_ENTRY ListHead,
+ *                            IN PLIST_ENTRY ListEntry,
+ *                            IN PKSPIN_LOCK Lock)
+ */
+.global _ExInterlockedInsertHeadList@12
+_ExInterlockedInsertHeadList@12:
+
+    /* Save lock pointer */
+#ifdef CONFIG_SMP
+    mov edx, [esp+12]
+#endif
+
+    /* Save flags and disable interrupts */
+.start3:
+    pushfd
+    cli
+    ACQUIRE_SPINLOCK(edx, .spin3)
+
+    /* Get list pointers */
+    mov eax, [esp+8]
+    mov ecx, [esp+12]
+    mov edx, [eax]
+
+    /* Do the insert */
+    mov [ecx], edx
+    mov [ecx+4], eax
+    mov [eax], ecx
+    mov [edx+4], ecx
+
+    /* Release lock */
+#ifdef CONFIG_SMP
+    mov ecx, [esp+16]
+    RELEASE_SPINLOCK(ecx)
+#endif
+
+    /* Restore flags */
+    popfd
+
+    /* Check if the list was empty and return NULL */
+    xor eax, edx
+    jz 2f
+
+    /* Return pointer */
+    mov eax, edx
+
+2:
+    ret 12
+
+#ifdef CONFIG_SMP
+.spin3:
+    popfd
+    SPIN_ON_LOCK(edx, .start3)
+#endif
+
+/*PLIST_ENTRY
+ *NTAPI
+ *ExInterlockedInsertTailList(IN PLIST_ENTRY ListHead,
+ *                            IN PLIST_ENTRY ListEntry,
+ *                            IN PKSPIN_LOCK Lock)
+ */
+.global _ExInterlockedInsertTailList@12
+_ExInterlockedInsertTailList@12:
+
+    /* Save lock pointer */
+#ifdef CONFIG_SMP
+    mov edx, [esp+12]
+#endif
+
+    /* Save flags and disable interrupts */
+.start4:
+    pushfd
+    cli
+    ACQUIRE_SPINLOCK(edx, .spin4)
+
+    /* Get list pointers */
+    mov eax, [esp+8]
+    mov ecx, [esp+12]
+    mov edx, [eax+4]
+
+    /* Do the insert */
+    mov [ecx], eax
+    mov [ecx+4], edx
+    mov [eax+4], ecx
[truncated at 1000 lines; 1285 more skipped] 
-------------- next part --------------
An HTML attachment was scrubbed...
URL: http://www.reactos.org/pipermail/ros-diffs/attachments/20051231/4d7e4b1c/attachment.html

