diff --git a/sdk/xtdk/kefuncs.h b/sdk/xtdk/kefuncs.h
index bca3a86..6297e2c 100644
--- a/sdk/xtdk/kefuncs.h
+++ b/sdk/xtdk/kefuncs.h
@@ -16,6 +16,10 @@
 
 
 /* Kernel services routines forward references */
+XTFASTCALL
+VOID
+KeAcquireSpinLock(IN OUT PKSPIN_LOCK SpinLock);
+
 XTAPI
 VOID
 KeInitializeApc(IN PKAPC Apc,
@@ -69,6 +73,10 @@ KeReleaseSemaphore(IN PKSEMAPHORE Semaphore,
                    IN LONG Adjustment,
                    IN BOOLEAN Wait);
 
+XTFASTCALL
+VOID
+KeReleaseSpinLock(IN OUT PKSPIN_LOCK SpinLock);
+
 XTAPI
 VOID
 KeSetTargetProcessorDpc(IN PKDPC Dpc,
diff --git a/xtoskrnl/includes/kei.h b/xtoskrnl/includes/kei.h
index eb16866..bbba82a 100644
--- a/xtoskrnl/includes/kei.h
+++ b/xtoskrnl/includes/kei.h
@@ -13,6 +13,10 @@
 
 
 /* Kernel services routines forward references */
+XTFASTCALL
+VOID
+KeAcquireQueuedSpinLock(IN KSPIN_LOCK_QUEUE_LEVEL LockLevel);
+
 XTAPI
 VOID
 KeClearEvent(IN PKEVENT Event);
@@ -71,6 +75,10 @@ XTFASTCALL
 KRUNLEVEL
 KeRaiseRunLevel(IN KRUNLEVEL RunLevel);
 
+XTFASTCALL
+VOID
+KeReleaseQueuedSpinLock(IN KSPIN_LOCK_QUEUE_LEVEL LockLevel);
+
 XTAPI
 LONG
 KeSetEvent(IN PKEVENT Event,
diff --git a/xtoskrnl/ke/spinlock.c b/xtoskrnl/ke/spinlock.c
index bb2316f..5d11bfd 100644
--- a/xtoskrnl/ke/spinlock.c
+++ b/xtoskrnl/ke/spinlock.c
@@ -9,6 +9,53 @@
 #include
 
 
+/**
+ * Acquires a specified queued spinlock.
+ *
+ * @param LockLevel
+ *        Supplies the queued spinlock level.
+ *
+ * @return This routine does not return any value.
+ *
+ * @since XT 1.0
+ */
+XTFASTCALL
+VOID
+KeAcquireQueuedSpinLock(IN KSPIN_LOCK_QUEUE_LEVEL LockLevel)
+{
+    /* Acquire the queued spinlock */
+    KeAcquireSpinLock(KeGetCurrentProcessorControlBlock()->LockQueue[LockLevel].Lock);
+}
+
+/**
+ * Acquires a kernel spinlock.
+ *
+ * @param SpinLock
+ *        Supplies a pointer to the kernel spinlock.
+ *
+ * @return This routine does not return any value.
+ *
+ * @since XT 1.0
+ */
+XTFASTCALL
+VOID
+KeAcquireSpinLock(IN OUT PKSPIN_LOCK SpinLock)
+{
+    /* Try to acquire the lock */
+    while(RtlAtomicBitTestAndSet((PLONG)SpinLock, 0))
+    {
+        /* Wait until the lock is cleared */
+        while(*(VOLATILE PKSPIN_LOCK)SpinLock & 1)
+        {
+            /* Yield processor and keep waiting */
+            ArYieldProcessor();
+        }
+    }
+
+    /* Add an explicit memory barrier */
+    ArReadWriteBarrier();
+}
+
 /**
  * Initializes a kernel spinlock object.
  *
@@ -26,3 +73,42 @@ KeInitializeSpinLock(IN PKSPIN_LOCK SpinLock)
     /* Zero initialize spinlock */
     *SpinLock = 0;
 }
+
+/**
+ * Releases a queued spinlock.
+ *
+ * @param LockLevel
+ *        Supplies the queued spinlock level.
+ *
+ * @return This routine does not return any value.
+ *
+ * @since XT 1.0
+ */
+XTFASTCALL
+VOID
+KeReleaseQueuedSpinLock(IN KSPIN_LOCK_QUEUE_LEVEL LockLevel)
+{
+    /* Release the queued spinlock */
+    KeReleaseSpinLock(KeGetCurrentProcessorControlBlock()->LockQueue[LockLevel].Lock);
+}
+
+/**
+ * Releases a kernel spinlock.
+ *
+ * @param SpinLock
+ *        Supplies a pointer to the kernel spinlock.
+ *
+ * @return This routine does not return any value.
+ *
+ * @since XT 1.0
+ */
+XTFASTCALL
+VOID
+KeReleaseSpinLock(IN OUT PKSPIN_LOCK SpinLock)
+{
+    /* Clear the lock */
+    RtlAtomicAnd32((PLONG)SpinLock, 0);
+
+    /* Add an explicit memory barrier */
+    ArReadWriteBarrier();
+}
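
Below is a minimal usage sketch of the plain spinlock API declared above, assuming the XTDK definitions from sdk/xtdk/kefuncs.h are in scope; ExampleLock and KepExampleUpdateSharedState are hypothetical names used only to illustrate the initialize/acquire/release pairing.

/* Hypothetical example, not part of the patch: protects shared state
   with the KSPIN_LOCK routines declared in kefuncs.h */
static KSPIN_LOCK ExampleLock;

XTAPI
VOID
KepExampleUpdateSharedState(VOID)
{
    /* Prepare the lock; in real code this is done once during initialization */
    KeInitializeSpinLock(&ExampleLock);

    /* Spin until the lock is owned, then enter the critical section */
    KeAcquireSpinLock(&ExampleLock);

    /* ... touch the state protected by ExampleLock ... */

    /* Clear the lock so other processors can acquire it */
    KeReleaseSpinLock(&ExampleLock);
}

The queued variants follow the same pairing, but take a KSPIN_LOCK_QUEUE_LEVEL index instead of a lock pointer and operate on the per-processor LockQueue entry for that level, as shown in xtoskrnl/ke/spinlock.c above.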