#ifndef AL_ATOMIC_H
#define AL_ATOMIC_H

typedef void *volatile XchgPtr;
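
/* Small set of lock-free atomic helpers.  Each platform branch below provides
 * the same API:
 *
 *   IncrementRef/DecrementRef       - atomically adjust a RefCount and return
 *                                     the new value.
 *   ExchangeInt/ExchangePtr         - atomically store a new value and return
 *                                     the previous one.
 *   CompExchangeInt/CompExchangePtr - atomically store a new value only if the
 *                                     current value equals oldval; return the
 *                                     previous value either way.
 */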
/* GCC 4.1+ provides the __sync atomic builtins (not usable on QNX). */
#if defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) && !defined(__QNXNTO__)

typedef unsigned int RefCount;

inline RefCount IncrementRef(volatile RefCount *ptr)
{ return __sync_add_and_fetch(ptr, 1); }
inline RefCount DecrementRef(volatile RefCount *ptr)
{ return __sync_sub_and_fetch(ptr, 1); }

/* Atomically store newval and return the value previously held. */
inline int ExchangeInt(volatile int *ptr, int newval)
{
    return __sync_lock_test_and_set(ptr, newval);
}
inline void *ExchangePtr(XchgPtr *ptr, void *newval)
{
    return __sync_lock_test_and_set(ptr, newval);
}

/* Atomically store newval only if *ptr equals oldval; return the previous
 * value either way. */
inline int CompExchangeInt(volatile int *ptr, int oldval, int newval)
{
    return __sync_val_compare_and_swap(ptr, oldval, newval);
}
inline void *CompExchangePtr(XchgPtr *ptr, void *oldval, void *newval)
{
    return __sync_val_compare_and_swap(ptr, oldval, newval);
}
/* No __sync builtins, but this is x86/x86-64: use lock-prefixed inline assembly. */
#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))

/* Atomically add incr to *dest and return the value *dest held beforehand. */
inline unsigned int xaddl(volatile unsigned int *dest, int incr)
{
    unsigned int ret;
    __asm__ __volatile__("lock; xaddl %0,(%1)"
                         : "=r" (ret)
                         : "r" (dest), "0" (incr)
                         : "memory");
    return ret;
}

typedef unsigned int RefCount;

/* xaddl returns the old value, so adjust by the increment to get the new one. */
inline RefCount IncrementRef(volatile RefCount *ptr)
{ return xaddl(ptr, 1)+1; }
inline RefCount DecrementRef(volatile RefCount *ptr)
{ return xaddl(ptr, -1)-1; }

inline int ExchangeInt(volatile int *dest, int newval)
{
    int ret;
    __asm__ __volatile__("lock; xchgl %0,(%1)"
                         : "=r" (ret)
                         : "r" (dest), "0" (newval)
                         : "memory");
    return ret;
}
inline void *ExchangePtr(XchgPtr *dest, void *newval)
{
    void *ret;
    __asm__ __volatile__(
#ifdef __i386__
        "lock; xchgl %0,(%1)"
#else
        "lock; xchgq %0,(%1)"
#endif
        : "=r" (ret)
        : "r" (dest), "0" (newval)
        : "memory"
    );
    return ret;
}

/* cmpxchg leaves the previous value of *dest in eax/rax, hence the "=a"/"0"
 * constraints tying ret and oldval to that register. */
inline int CompExchangeInt(volatile int *dest, int oldval, int newval)
{
    int ret;
    __asm__ __volatile__("lock; cmpxchgl %2,(%1)"
                         : "=a" (ret)
                         : "r" (dest), "r" (newval), "0" (oldval)
                         : "memory");
    return ret;
}
inline void *CompExchangePtr(XchgPtr *dest, void *oldval, void *newval)
{
    void *ret;
    __asm__ __volatile__(
#ifdef __i386__
        "lock; cmpxchgl %2,(%1)"
#else
        "lock; cmpxchgq %2,(%1)"
#endif
        : "=a" (ret)
        : "r" (dest), "r" (newval), "0" (oldval)
        : "memory"
    );
    return ret;
}
/* Windows: use the Interlocked* API. */
#elif defined(_WIN32)

#define WIN32_LEAN_AND_MEAN
#include <windows.h>

typedef LONG RefCount;
inline RefCount IncrementRef(volatile RefCount *ptr)
{ return InterlockedIncrement(ptr); }
inline RefCount DecrementRef(volatile RefCount *ptr)
{ return InterlockedDecrement(ptr); }

/* The Interlocked functions operate on LONG, so it must match int in size for
 * the union-based pointer conversions below to be valid. */
static_assert(sizeof(LONG)==sizeof(int), "sizeof LONG does not match sizeof int");

inline int ExchangeInt(volatile int *ptr, int newval)
{
    union {
        volatile int *i;
        volatile LONG *l;
    } u = { ptr };
    return InterlockedExchange(u.l, newval);
}
inline void *ExchangePtr(XchgPtr *ptr, void *newval)
{
    return InterlockedExchangePointer(ptr, newval);
}
inline int CompExchangeInt(volatile int *ptr, int oldval, int newval)
{
    union {
        volatile int *i;
        volatile LONG *l;
    } u = { ptr };
    return InterlockedCompareExchange(u.l, newval, oldval);
}
inline void *CompExchangePtr(XchgPtr *ptr, void *oldval, void *newval)
{
    return InterlockedCompareExchangePointer(ptr, newval, oldval);
}
#else
#error "No atomic functions available on this platform!"
typedef unsigned int RefCount;
#endif
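
/* Illustrative usage sketch (not part of the original header): a hypothetical
 * reference-counted object built on IncrementRef/DecrementRef.  The names
 * obj_refs, Object_AddRef and Object_Release are examples only.
 *
 *     static volatile RefCount obj_refs;  // hypothetical counter, assumed to
 *                                         // be set to 1 when the object is
 *                                         // created
 *
 *     void Object_AddRef(void)
 *     { IncrementRef(&obj_refs); }
 *
 *     void Object_Release(void)
 *     {
 *         if(DecrementRef(&obj_refs) == 0)
 *         {
 *             // Last reference dropped; the object may now be destroyed.
 *         }
 *     }
 */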
#endif /* AL_ATOMIC_H */