[arch][x86] implement basic spinlocks

- This fixes the instability; seems stable on x86-64.
Travis Geiselbrecht
2025-04-01 20:02:00 -07:00
parent 5a520eca3e
commit 8fdadd9b33
4 changed files with 96 additions and 2 deletions
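
In brief, the commit adds a per-sub-arch assembly implementation of the three spin-lock primitives and wires them up in the header and the build. A minimal usage sketch (not from the commit; it assumes only the declarations in the header diff below):

#include <arch/spinlock.h>

static spin_lock_t lock = SPIN_LOCK_INITIAL_VALUE;

void example(void) {
    arch_spin_lock(&lock);          // spins (with 'pause') until acquired
    /* critical section */
    arch_spin_unlock(&lock);

    // trylock returns the lock's previous value: 0 means it was free and is now ours
    if (arch_spin_trylock(&lock) == 0) {
        arch_spin_unlock(&lock);
    }
}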

arch/x86/32/spinlock.S (new file)

@@ -0,0 +1,44 @@
/*
 * Copyright (c) 2025 Travis Geiselbrecht
 *
 * Use of this source code is governed by a MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT
 */
#include <lk/asm.h>

#if WITH_SMP

// void arch_spin_lock(spin_lock_t *lock);
FUNCTION(arch_spin_lock)
    mov 4(%esp), %ecx           // lock pointer (cdecl: first arg on the stack)
    mov $1, %edx                // the value that marks the lock held
0:
    xor %eax, %eax              // cmpxchg compares against %eax; expect 0 (free)
    lock cmpxchg %edx, (%ecx)   // if *lock == 0, atomically set it to 1
    jz 1f                       // ZF set: the swap happened, lock acquired
    pause                       // spin-loop hint for the sibling hyperthread
    jmp 0b
1:
    ret
END_FUNCTION(arch_spin_lock)

// int arch_spin_trylock(spin_lock_t *lock);
FUNCTION(arch_spin_trylock)
    mov 4(%esp), %ecx
    mov $1, %eax
    lock xchg %eax, (%ecx)      // swap 1 in; old value lands in %eax (0 on success);
                                // xchg with memory is implicitly locked anyway
    ret
END_FUNCTION(arch_spin_trylock)

// void arch_spin_unlock(spin_lock_t *lock);
FUNCTION(arch_spin_unlock)
    mov 4(%esp), %ecx
    movl $0, (%ecx)             // plain store releases: x86 TSO orders it after
                                // everything in the critical section
    ret
END_FUNCTION(arch_spin_unlock)

#endif // WITH_SMP

arch/x86/64/spinlock.S (new file)

@@ -0,0 +1,40 @@
/*
 * Copyright (c) 2025 Travis Geiselbrecht
 *
 * Use of this source code is governed by a MIT-style
 * license that can be found in the LICENSE file or at
 * https://opensource.org/licenses/MIT
 */
#include <lk/asm.h>

#if WITH_SMP

// void arch_spin_lock(spin_lock_t *lock);
FUNCTION(arch_spin_lock)
    mov $1, %esi                // lock pointer arrives in %rdi (SysV AMD64 ABI)
0:
    xor %eax, %eax              // expect 0 (free) in %eax
    lock cmpxchg %esi, (%rdi)   // if *lock == 0, atomically set it to 1
    jz 1f                       // acquired
    pause
    jmp 0b
1:
    ret
END_FUNCTION(arch_spin_lock)

// int arch_spin_trylock(spin_lock_t *lock);
FUNCTION(arch_spin_trylock)
    mov $1, %eax
    lock xchg %eax, (%rdi)      // old value returned in %eax: 0 means acquired
    ret
END_FUNCTION(arch_spin_trylock)

// void arch_spin_unlock(spin_lock_t *lock);
FUNCTION(arch_spin_unlock)
    movl $0, (%rdi)             // 32-bit store matches the unsigned int lock
    ret
END_FUNCTION(arch_spin_unlock)

#endif // WITH_SMP
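
Both files implement the same algorithm; only the calling convention differs (stack argument on x86-32, %rdi on x86-64). A C-equivalent sketch of the three routines (not part of the commit), written against the GCC/Clang __atomic builtins with illustrative names:

#include <stdbool.h>

static inline void spin_lock_c(volatile unsigned int *lock) {
    for (;;) {
        unsigned int expected = 0;
        // 'lock cmpxchg': if *lock == 0, atomically store 1 and succeed
        if (__atomic_compare_exchange_n(lock, &expected, 1, false,
                                        __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
            return;
        __builtin_ia32_pause();     // the 'pause' spin-loop hint
    }
}

static inline int spin_trylock_c(volatile unsigned int *lock) {
    // 'xchg': atomically swap 1 in; the old value (0 on success) comes back
    return (int)__atomic_exchange_n(lock, 1, __ATOMIC_ACQUIRE);
}

static inline void spin_unlock_c(volatile unsigned int *lock) {
    // matches the bare 'movl $0': on x86 (TSO) a plain store acts as a release
    __atomic_store_n(lock, 0, __ATOMIC_RELEASE);
}

This is a plain test-and-set lock; a fancier one would spin on a read-only load before retrying the atomic to cut cache-line traffic, but for "basic spinlocks" the straight loop mirrors the assembly exactly.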

arch/x86/include/arch/spinlock.h

@@ -7,13 +7,16 @@
 */
#pragma once

#include <lk/compiler.h>
#include <arch/ops.h>
#include <arch/x86.h>
#include <stdbool.h>

#define SPIN_LOCK_INITIAL_VALUE (0)

-typedef unsigned long spin_lock_t;

__BEGIN_CDECLS

+typedef unsigned int spin_lock_t;
typedef x86_flags_t spin_lock_saved_state_t;
typedef uint spin_lock_save_flags_t;
@@ -27,6 +30,11 @@ static inline bool arch_spin_lock_held(spin_lock_t *lock) {
    return *lock != 0;
}

+#if WITH_SMP
+void arch_spin_lock(spin_lock_t *lock);
+int arch_spin_trylock(spin_lock_t *lock);
+void arch_spin_unlock(spin_lock_t *lock);
+#else
static inline void arch_spin_lock(spin_lock_t *lock) {
    *lock = 1;
}
@@ -38,6 +46,7 @@ static inline int arch_spin_trylock(spin_lock_t *lock) {
static inline void arch_spin_unlock(spin_lock_t *lock) {
    *lock = 0;
}
+#endif

/* flags are unused on x86 */
#define ARCH_DEFAULT_SPIN_LOCK_FLAG_INTERRUPTS 0

@@ -53,4 +62,4 @@ arch_interrupt_restore(spin_lock_saved_state_t old_state, spin_lock_save_flags_t
    x86_restore_flags(old_state);
}

__END_CDECLS
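
These locks never disable interrupts themselves, so callers pair them with the interrupt save/restore helpers; a hedged sketch of the usual pattern (arch_interrupt_save is assumed to be defined alongside arch_interrupt_restore with the mirrored signature):

// Interrupts stay off while the lock is held: an interrupt handler taking
// the same lock on this CPU would otherwise spin forever.
spin_lock_saved_state_t state;
arch_interrupt_save(&state, ARCH_DEFAULT_SPIN_LOCK_FLAG_INTERRUPTS);  // assumed counterpart
arch_spin_lock(&lock);
/* critical section */
arch_spin_unlock(&lock);
arch_interrupt_restore(state, ARCH_DEFAULT_SPIN_LOCK_FLAG_INTERRUPTS);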

arch/x86/rules.mk

@@ -69,6 +69,7 @@ MODULE_SRCS += \
    $(SUBARCH_DIR)/gdt.S \
    $(SUBARCH_DIR)/mmu.c \
    $(SUBARCH_DIR)/ops.S \
+   $(SUBARCH_DIR)/spinlock.S \
\
    $(LOCAL_DIR)/arch.c \
    $(LOCAL_DIR)/cache.c \