--- /dev/null
+#ifndef __CORAX_SPINLOCK_H
+#define __CORAX_SPINLOCK_H
+
+#include <corax/types.h>
+
+typedef u32_t cx_spinlock_t;
+
+/*
+ * cx_spinlock_lock() - Lock a cx_spinlock, waiting indefinitely
+ *
+ * SYNOPSIS
+ * void cx_spinlock_lock(cx_spinlock_t *lock);
+ *
+ * DESCRIPTION
+ * The cx_spinlock_lock() function will attempt to lock the cx_spinlock pointed to by
+ * `lock', spinning in a busy loop until the lock is acquired. It does not return until the
+ * caller holds the lock.
+ *
+ * RETURN VALUE
+ * None
+ *
+ * ERRORS
+ * This function does not signal any errors.
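+ *
+ * EXAMPLES
+ * A minimal usage sketch (illustrative only; the names `stats_lock' and `packet_count'
+ * are hypothetical, and a cx_spinlock_t initialised to zero is assumed to start in the
+ * unlocked state):
+ *
+ *     static cx_spinlock_t stats_lock = 0;
+ *     static u32_t packet_count;
+ *
+ *     void count_packet(void)
+ *     {
+ *             cx_spinlock_lock(&stats_lock);
+ *             packet_count++;
+ *             cx_spinlock_unlock(&stats_lock);
+ *     }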
+ */
+void cx_spinlock_lock(cx_spinlock_t*);
+
+/*
+ * cx_spinlock_trylock() - Attempt to lock a cx_spinlock
+ *
+ * SYNOPSIS
+ * int cx_spinlock_trylock(cx_spinlock_t *lock);
+ *
+ * DESCRIPTION
+ * The cx_spinlock_trylock() function will make a single attempt to lock the cx_spinlock
+ * pointed to by `lock'. This function will not block.
+ *
+ * RETURN VALUE
+ * This function returns zero upon success, or a non-zero value if the lock could not be
+ * acquired.
+ *
+ * ERRORS
+ * This function does not signal any errors.
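+ *
+ * EXAMPLES
+ * A minimal sketch of non-blocking use (illustrative only; `log_lock', try_log_event()
+ * and log_append() are hypothetical names; a non-zero return tells the caller the lock
+ * was busy):
+ *
+ *     static cx_spinlock_t log_lock = 0;
+ *
+ *     int try_log_event(const char *msg)
+ *     {
+ *             if (cx_spinlock_trylock(&log_lock) != 0)
+ *                     return -1;
+ *             log_append(msg);
+ *             cx_spinlock_unlock(&log_lock);
+ *             return 0;
+ *     }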
+ */
+int cx_spinlock_trylock(cx_spinlock_t*);
+
+/*
+ * cx_spinlock_unlock() - Unlock a cx_spinlock
+ *
+ * SYNOPSIS
+ * int cx_spinlock_unlock(cx_spinlock_t *lock);
+ *
+ * DESCRIPTION
+ * The cx_spinlock_unlock() function will unlock the cx_spinlock pointed to by `lock'.
+ * The lock is unlocked regardless of which caller originally acquired it, and it is always
+ * in the unlocked state when this function returns.
+ *
+ * RETURN VALUE
+ * This function returns zero if the lock was not locked, or a non-zero value if it had
+ * been locked. In either case, the lock is in the unlocked state when the call returns.
+ *
+ * ERRORS
+ * This function does not signal any errors.
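+ *
+ * EXAMPLES
+ * A minimal sketch that uses the return value to catch an unlock of a lock that was not
+ * held (illustrative only; `dev_lock' and cx_panic() are hypothetical names):
+ *
+ *     static cx_spinlock_t dev_lock = 0;
+ *
+ *     void release_device(void)
+ *     {
+ *             if (cx_spinlock_unlock(&dev_lock) == 0)
+ *                     cx_panic("dev_lock was not held");
+ *     }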
+ */
+int cx_spinlock_unlock(cx_spinlock_t*);
+
+#endif /* __CORAX_SPINLOCK_H */
--- /dev/null
+ .section .text
+
+ .global cx_spinlock_lock
+ .global cx_spinlock_trylock
+ .global cx_spinlock_unlock
+
+#ifdef __i386__
+
+cx_spinlock_lock:
+ movl 4(%esp), %edx /* %edx = lock pointer */
+0: xorl %eax, %eax /* expected value: 0 (unlocked) */
+ movl $1, %ecx /* desired value: 1 (locked) */
+ lock
+ cmpxchgl %ecx, (%edx) /* atomically take the lock if it is free */
+ jnz 0b /* lock failed, spin and retry */
+ ret /* lock succeeded */
+
+cx_spinlock_trylock:
+ movl 4(%esp), %edx /* %edx = lock pointer */
+ xorl %eax, %eax /* expected value: 0 (unlocked) */
+ movl $1, %ecx /* desired value: 1 (locked) */
+ lock
+ cmpxchgl %ecx, (%edx) /* single atomic attempt */
+ ret /* %eax: 0 on success, old lock value on failure */
+
+cx_spinlock_unlock:
+ movl 4(%esp), %edx /* %edx = lock pointer */
+ xorl %eax, %eax
+ lock
+ xchgl %eax, (%edx) /* store 0 (unlocked), fetch previous value */
+ ret /* %eax: previous lock value */
+
+#endif /* __i386__ */
+
+#ifdef __amd64__
+
+cx_spinlock_lock:
+ xorq %rax, %rax /* expected value: 0 (unlocked) */
+ movl $1, %ecx /* desired value: 1 (locked) */
+ lock
+ cmpxchgl %ecx, (%rdi) /* %rdi = lock pointer */
+ jnz cx_spinlock_lock /* lock failed, spin and retry */
+ ret /* lock succeeded */
+
+cx_spinlock_trylock:
+ xorl %eax, %eax /* expected value: 0 (unlocked) */
+ movl $1, %ecx /* desired value: 1 (locked) */
+ lock
+ cmpxchgl %ecx, (%rdi) /* single atomic attempt */
+ ret /* %eax: 0 on success, old lock value on failure */
+
+cx_spinlock_unlock:
+ xorq %rax, %rax
+ lock
+ xchgl %eax, (%rdi) /* store 0 (unlocked), fetch previous value */
+ ret /* %eax: previous lock value */
+
+#endif /* __amd64__ */