KallistiOS git master
Independent SDK for the Sega Dreamcast
spinlock.h
/* KallistiOS ##version##

   arch/dreamcast/include/spinlock.h
   Copyright (C) 2001 Megan Potter

*/

/** \file    arch/spinlock.h
    \brief   Simple locking.
    \ingroup kthreads

    This file contains definitions for very simple locks. Most of the time, you
    will probably not use such low-level locking, but will opt for something
    more fully featured like mutexes, semaphores, reader-writer semaphores, or
    recursive locks.

    \author Megan Potter

    \see    kos/sem.h
    \see    kos/mutex.h
    \see    kos/rwsem.h
    \see    kos/recursive_lock.h
*/

#ifndef __ARCH_SPINLOCK_H
#define __ARCH_SPINLOCK_H

/* Defines processor specific spinlocks */

#include <sys/cdefs.h>
__BEGIN_DECLS

#include <stdbool.h>

/* DC implementation uses threads most of the time */
#include <kos/thread.h>

/** \brief  Spinlock data type. */
typedef volatile int spinlock_t;

/** \brief  Spinlock initializer.

    All created spinlocks should be initialized with this initializer so that
    they are in a sane state, ready to be used.
*/
#define SPINLOCK_INITIALIZER 0

/** \brief  Initialize a spinlock.

    This function abstracts initializing a spinlock, in case the initializer
    is not applicable to what you are doing.

    \param  lock    A pointer to the spinlock to be initialized.
*/
static inline void spinlock_init(spinlock_t *lock) {
    *lock = SPINLOCK_INITIALIZER;
}
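
/* Usage sketch: two equivalent ways to put a spinlock into a known-unlocked
   state. A statically allocated lock can use the initializer directly; a lock
   embedded in dynamically allocated or reused memory can be set up at runtime
   with spinlock_init(). The names vid_lock, frame_queue and
   frame_queue_setup() are hypothetical:

       static spinlock_t vid_lock = SPINLOCK_INITIALIZER;

       struct frame_queue {
           spinlock_t lock;
           int count;
       };

       void frame_queue_setup(struct frame_queue *q) {
           spinlock_init(&q->lock);   // same effect as SPINLOCK_INITIALIZER
           q->count = 0;
       }
*/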

/* Note here that even if threads aren't enabled, we'll still set the
   lock so that it can be used for anti-IRQ protection (e.g., malloc) */

/** \brief  Try to lock, without spinning.

    This function will attempt to lock the lock, but will not spin. Instead,
    it will return whether the lock was obtained or not.

    \param  lock    A pointer to the spinlock to be locked.
    \return         False if the lock is held by another thread. True if
                    the lock was successfully obtained.
*/
static inline bool spinlock_trylock(spinlock_t *lock) {
    bool locked = false;

    __asm__ __volatile__("tas.b @%2\n\t"
                         "movt %0\n\t"
                         : "=r"(locked), "=m"(*lock)
                         : "r"(lock)
                         : "t");

    return locked;
}
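
/* Usage sketch: opportunistic locking with spinlock_trylock(). Instead of
   spinning, the caller backs off immediately if the lock is contended and can
   retry later. stats_lock, stats_hits and stats_try_add() are hypothetical
   names:

       static spinlock_t stats_lock = SPINLOCK_INITIALIZER;
       static unsigned int stats_hits = 0;

       bool stats_try_add(void) {
           if(!spinlock_trylock(&stats_lock))
               return false;              // contended: try again later

           stats_hits++;                  // critical section
           spinlock_unlock(&stats_lock);
           return true;
       }
*/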

/** \brief  Spin on a lock.

    This function will spin on the lock, and will not return until the lock
    has been obtained for the calling thread.

    \param  lock    A pointer to the spinlock to be locked.
*/
static inline void spinlock_lock(spinlock_t *lock) {
    while(!spinlock_trylock(lock))
        thd_pass();
}
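
/* Usage sketch: a conventional critical section built from spinlock_lock()
   and spinlock_unlock(). Because the wait loop yields with thd_pass(), this
   form is only suitable outside interrupt context. counter_lock,
   shared_counter and counter_bump() are hypothetical names:

       static spinlock_t counter_lock = SPINLOCK_INITIALIZER;
       static int shared_counter = 0;

       int counter_bump(void) {
           int v;

           spinlock_lock(&counter_lock);    // spins (yielding) until acquired
           v = ++shared_counter;            // critical section
           spinlock_unlock(&counter_lock);

           return v;
       }
*/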

/** \brief  Spin on a lock.

    This function will spin on the lock, and will not return until the lock
    has been obtained for the calling thread, unless it is called from within
    an interrupt context.

    \param  lock    A pointer to the spinlock to be locked.
    \return         True if the spinlock could be locked, false otherwise.
*/
static inline bool spinlock_lock_irqsafe(spinlock_t *lock) {
    if(irq_inside_int())
        return spinlock_trylock(lock);

    spinlock_lock(lock);
    return true;
}
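
/* Usage sketch: code reachable both from threads and from interrupt handlers
   can use spinlock_lock_irqsafe(), but must handle the case where the lock
   could not be taken inside an interrupt. evt_lock, evt_count and
   evt_record() are hypothetical names:

       static spinlock_t evt_lock = SPINLOCK_INITIALIZER;
       static unsigned int evt_count = 0;

       bool evt_record(void) {
           if(!spinlock_lock_irqsafe(&evt_lock))
               return false;              // inside an IRQ and the lock was busy

           evt_count++;                   // critical section
           spinlock_unlock(&evt_lock);
           return true;
       }
*/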

/** \brief  Free a lock.

    This function will unlock the lock that is currently held by the calling
    thread. Do not use this function unless you actually hold the lock!

    \param  lock    A pointer to the spinlock to be unlocked.
*/
static inline void spinlock_unlock(spinlock_t *lock) {
    *lock = 0;
}

/** \brief  Determine if a lock is locked.

    This function will return whether or not the specified lock is locked at
    the moment it is called. This is NOT a thread-safe way of predicting
    whether the lock will still be in that state by the time you get around
    to locking it!

    \param  lock    A pointer to the spinlock to be checked.
    \return         True if the spinlock is locked, false otherwise.
*/
static inline bool spinlock_is_locked(spinlock_t *lock) {
    return *lock != 0;
}
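
/* Usage sketch: spinlock_is_locked() only reports a snapshot, so it is best
   reserved for diagnostics, e.g. asserting that the caller already holds a
   lock it was required to take (requires <assert.h>). dma_lock and
   dma_restart_locked() are hypothetical names:

       static spinlock_t dma_lock = SPINLOCK_INITIALIZER;

       void dma_restart_locked(void) {
           assert(spinlock_is_locked(&dma_lock));   // caller must hold dma_lock
           // ... restart the transfer while holding the lock ...
       }
*/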

/** \cond INTERNAL */
static inline void __spinlock_scoped_cleanup(spinlock_t **lock) {
    spinlock_unlock(*lock);
}

#define ___spinlock_lock_scoped(m, l) \
    spinlock_t *__scoped_spinlock_##l __attribute__((cleanup(__spinlock_scoped_cleanup))) = (spinlock_lock(m), (m))
#define __spinlock_lock_scoped(m, l) ___spinlock_lock_scoped(m, l)
/** \endcond */

/** \brief  Spin on a lock with scope management.

    This macro will spin on the lock, similar to spinlock_lock(), with the
    difference that the lock will automatically be freed once execution exits
    the functional block in which the macro was called.

    \param  lock    A pointer to the spinlock to be locked.
*/
#define spinlock_lock_scoped(lock) \
    __spinlock_lock_scoped((lock), __LINE__)
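
/* Usage sketch: with spinlock_lock_scoped(), the lock is released
   automatically when execution leaves the enclosing block, including through
   early returns. node_t, list_lock, list_head and list_pop() are hypothetical
   names:

       typedef struct node {
           struct node *next;
       } node_t;

       static spinlock_t list_lock = SPINLOCK_INITIALIZER;
       static node_t *list_head = NULL;

       node_t *list_pop(void) {
           spinlock_lock_scoped(&list_lock);   // unlocked on every return path

           if(!list_head)
               return NULL;                    // lock is released here too

           node_t *n = list_head;
           list_head = n->next;
           return n;
       }
*/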

__END_DECLS

#endif  /* __ARCH_SPINLOCK_H */