KallistiOS git master
Independent SDK for the Sega Dreamcast
spinlock.h
/* KallistiOS ##version##

   arch/dreamcast/include/spinlock.h
   Copyright (C) 2001 Megan Potter

*/

/** \file arch/spinlock.h
    \brief Simple locking.
    \ingroup kthreads

    This file contains definitions for very simple locks. Most of the time, you
    will probably not use such low-level locking, but will opt for something
    more fully featured like mutexes, semaphores, or reader-writer semaphores.

    \author Megan Potter

    \see kos/sem.h
    \see kos/mutex.h
    \see kos/rwsem.h
*/

#ifndef __ARCH_SPINLOCK_H
#define __ARCH_SPINLOCK_H

/* Defines processor specific spinlocks */

#include <kos/cdefs.h>
__BEGIN_DECLS

#include <stdbool.h>

/* DC implementation uses threads most of the time */
#include <kos/thread.h>

/** \brief Spinlock data type. */
typedef volatile int spinlock_t;

/** \brief Spinlock initializer.

    All created spinlocks should be initialized with this initializer so that
    they are in a sane state, ready to be used.
*/
#define SPINLOCK_INITIALIZER 0

/** \brief Initialize a spinlock.

    This function abstracts initializing a spinlock, in case the
    initializer is not applicable to what you are doing.

    \param lock A pointer to the spinlock to be initialized.
*/
static inline void spinlock_init(spinlock_t *lock) {
    *lock = SPINLOCK_INITIALIZER;
}

/* Note here that even if threads aren't enabled, we'll still set the
   lock so that it can be used for anti-IRQ protection (e.g., malloc) */

/** \brief Try to lock, without spinning.

    This function will attempt to lock the lock, but will not spin. Instead, it
    will return whether the lock was obtained or not.

    \param lock A pointer to the spinlock to be locked.
    \return False if the lock is held by another thread. True if
            the lock was successfully obtained.
*/
static inline bool spinlock_trylock(spinlock_t *lock) {
    bool locked = false;

    __asm__ __volatile__("tas.b @%2\n\t"
                         "movt %0\n\t"
                         : "=r"(locked), "=m"(*lock)
                         : "r"(lock)
                         : "t");

    return locked;
}

/** \brief Spin on a lock.

    This function will spin on the lock, and will not return until the lock has
    been obtained for the calling thread.

    \param lock A pointer to the spinlock to be locked.
*/
static inline void spinlock_lock(spinlock_t *lock) {
    while(!spinlock_trylock(lock))
        thd_pass();
}

/** \brief Spin on a lock.

    This function will spin on the lock, and will not return until the lock has
    been obtained for the calling thread, unless it is called from within an
    interrupt context.

    \param lock A pointer to the spinlock to be locked.
    \return True if the spinlock could be locked, false otherwise.
*/
static inline bool spinlock_lock_irqsafe(spinlock_t *lock) {
    if(irq_inside_int())
        return spinlock_trylock(lock);

    spinlock_lock(lock);
    return true;
}

/** \brief Free a lock.

    This function will unlock the lock that is currently held by the calling
    thread. Do not use this function unless you actually hold the lock!

    \param lock A pointer to the spinlock to be unlocked.
*/
static inline void spinlock_unlock(spinlock_t *lock) {
    *lock = 0;
}

/** \brief Determine if a lock is locked.

    This function will return whether or not the lock specified is actually locked
    when it is called. This is NOT a thread-safe way of determining if a lock
    will be locked when you get around to locking it!

    \param lock A pointer to the spinlock to be checked.
    \return True if the spinlock is locked, false otherwise.
*/
static inline bool spinlock_is_locked(const spinlock_t *lock) {
    return *lock != 0;
}

/** \cond INTERNAL */
static inline void __spinlock_scoped_cleanup(spinlock_t **lock) {
    spinlock_unlock(*lock);
}

#define ___spinlock_lock_scoped(m, l) \
    spinlock_t *__scoped_spinlock_##l __attribute__((cleanup(__spinlock_scoped_cleanup))) = (spinlock_lock(m), (m))
#define __spinlock_lock_scoped(m, l) ___spinlock_lock_scoped(m, l)
/** \endcond */

/** \brief Spin on a lock with scope management.

    This macro will spin on the lock, similar to spinlock_lock(), with the
    difference that the lock will automatically be freed once execution
    exits the block in which the macro was called.

    \param lock A pointer to the spinlock to be locked.
*/
#define spinlock_lock_scoped(lock) \
    __spinlock_lock_scoped((lock), __LINE__)

__END_DECLS

#endif /* __ARCH_SPINLOCK_H */
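
The snippets below are usage sketches, not part of the header itself; every variable and function name in them (count_lock, bump_count, and so on) is illustrative. First, the basic pattern: a statically initialized lock guarding a short critical section.

#include <arch/spinlock.h>

static spinlock_t count_lock = SPINLOCK_INITIALIZER;  /* starts unlocked */
static int shared_count = 0;

void bump_count(void) {
    spinlock_lock(&count_lock);     /* spins (yielding via thd_pass) until acquired */
    shared_count++;                 /* critical section */
    spinlock_unlock(&count_lock);   /* only call this while actually holding the lock */
}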
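
A sketch of the non-blocking path: spinlock_trylock() returns immediately, so the caller decides what to do when the lock is contended. The counter is again illustrative.

#include <arch/spinlock.h>

static spinlock_t stats_lock = SPINLOCK_INITIALIZER;
static unsigned handled = 0;

void count_event(void) {
    if(!spinlock_trylock(&stats_lock))
        return;                     /* contended: give up instead of spinning */

    handled++;                      /* critical section */
    spinlock_unlock(&stats_lock);
}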
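
A sketch of code that may run both in normal thread context and inside an interrupt handler. Because spinlock_lock_irqsafe() degrades to a single try when called from an interrupt, its false return must be handled; the small FIFO here is illustrative.

#include <arch/spinlock.h>

static spinlock_t fifo_lock = SPINLOCK_INITIALIZER;
static int fifo[16];
static unsigned fifo_head = 0;

bool fifo_push(int value) {
    /* In thread context this spins until held; inside an IRQ it only tries once. */
    if(!spinlock_lock_irqsafe(&fifo_lock))
        return false;               /* contended while in an interrupt: give up */

    fifo[fifo_head++ & 15] = value; /* shared-state update under the lock */
    spinlock_unlock(&fifo_lock);
    return true;
}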
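
A sketch of the scoped form: spinlock_lock_scoped() relies on the cleanup attribute machinery defined above, so the lock is released on every exit path of the enclosing block, including early returns.

#include <arch/spinlock.h>

static spinlock_t state_lock = SPINLOCK_INITIALIZER;
static int state = 0;

int set_state(int new_state) {
    spinlock_lock_scoped(&state_lock);  /* released automatically at scope exit */

    if(new_state < 0)
        return -1;                      /* early return: the lock is still freed */

    state = new_state;
    return 0;
}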
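
Finally, a sketch of spinlock_init() and spinlock_is_locked(). As the documentation stresses, the check is only a snapshot, so it is best limited to assertions and diagnostics rather than locking decisions.

#include <assert.h>
#include <arch/spinlock.h>

static spinlock_t dma_lock;

void dma_subsys_init(void) {
    spinlock_init(&dma_lock);       /* same effect as assigning SPINLOCK_INITIALIZER */

    /* Snapshot check only: fine for an init-time sanity assert, but NOT a
       thread-safe way to decide whether a later lock attempt will succeed. */
    assert(!spinlock_is_locked(&dma_lock));
}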