#ifndef _CONCURRENCE_H
#define _CONCURRENCE_H 1

#include <exception>
#include <bits/gthr.h>

_GLIBCXX_BEGIN_NAMESPACE(__gnu_cxx)
  // Available locking policies:
  //   _S_single   single-threaded code, no locking needed.
  //   _S_mutex    multi-threaded code locked with gthr.h mutexes.
  //   _S_atomic   multi-threaded code using atomic operations.
  enum _Lock_policy { _S_single, _S_mutex, _S_atomic };

  // Compile-time constant indicating the preferred locking policy in
  // the current configuration.
  static const _Lock_policy __default_lock_policy =
#ifdef __GTHREADS
#if (defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2) \
     && defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4))
  _S_atomic;
#else
  _S_mutex;
#endif
#else
  _S_single;
#endif
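  // Illustrative sketch, not part of the original header: reference
  // counting code in libstdc++ dispatches on _Lock_policy and takes
  // __default_lock_policy as its default template argument, roughly:
  //
  //   template<typename _Tp, _Lock_policy _Lp = __default_lock_policy>
  //     class _My_refcount;   // hypothetical class name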
  // Exception classes thrown by the wrappers below.  As this header is
  // used in libsupc++, only <exception> is depended upon.
  class __concurrence_lock_error : public std::exception
  {
  public:
    virtual char const* what() const throw()
    { return "__gnu_cxx::__concurrence_lock_error"; }
  };

  class __concurrence_unlock_error : public std::exception
  {
  public:
    virtual char const* what() const throw()
    { return "__gnu_cxx::__concurrence_unlock_error"; }
  };

  class __concurrence_broadcast_error : public std::exception
  {
  public:
    virtual char const* what() const throw()
    { return "__gnu_cxx::__concurrence_broadcast_error"; }
  };

  class __concurrence_wait_error : public std::exception
  {
  public:
    virtual char const* what() const throw()
    { return "__gnu_cxx::__concurrence_wait_error"; }
  };
  // Helpers that throw the corresponding concurrence error.
  inline void
  __throw_concurrence_lock_error()
  { throw __concurrence_lock_error(); }

  inline void
  __throw_concurrence_unlock_error()
  { throw __concurrence_unlock_error(); }

#ifdef __GTHREAD_HAS_COND
  inline void
  __throw_concurrence_broadcast_error()
  { throw __concurrence_broadcast_error(); }

  inline void
  __throw_concurrence_wait_error()
  { throw __concurrence_wait_error(); }
#endif
  /// Mutex wrapper over the gthreads mutex; non-copyable.
  class __mutex
  {
  private:
    __gthread_mutex_t _M_mutex;

    __mutex(const __mutex&);
    __mutex& operator=(const __mutex&);

  public:
    __mutex()
    {
      if (__gthread_active_p())
        {
#if defined __GTHREAD_MUTEX_INIT
          __gthread_mutex_t __tmp = __GTHREAD_MUTEX_INIT;
          _M_mutex = __tmp;
#else
          __GTHREAD_MUTEX_INIT_FUNCTION(&_M_mutex);
#endif
        }
    }

    void lock()
    {
      if (__gthread_active_p())
        {
          if (__gthread_mutex_lock(&_M_mutex) != 0)
            __throw_concurrence_lock_error();
        }
    }

    void unlock()
    {
      if (__gthread_active_p())
        {
          if (__gthread_mutex_unlock(&_M_mutex) != 0)
            __throw_concurrence_unlock_error();
        }
    }

    __gthread_mutex_t* gthread_mutex(void)
    { return &_M_mutex; }
  };
  /// Recursive mutex wrapper over the gthreads recursive mutex; non-copyable.
  class __recursive_mutex
  {
  private:
    __gthread_recursive_mutex_t _M_mutex;

    __recursive_mutex(const __recursive_mutex&);
    __recursive_mutex& operator=(const __recursive_mutex&);

  public:
    __recursive_mutex()
    {
      if (__gthread_active_p())
        {
#if defined __GTHREAD_RECURSIVE_MUTEX_INIT
          __gthread_recursive_mutex_t __tmp = __GTHREAD_RECURSIVE_MUTEX_INIT;
          _M_mutex = __tmp;
#else
          __GTHREAD_RECURSIVE_MUTEX_INIT_FUNCTION(&_M_mutex);
#endif
        }
    }

    void lock()
    {
      if (__gthread_active_p())
        {
          if (__gthread_recursive_mutex_lock(&_M_mutex) != 0)
            __throw_concurrence_lock_error();
        }
    }

    void unlock()
    {
      if (__gthread_active_p())
        {
          if (__gthread_recursive_mutex_unlock(&_M_mutex) != 0)
            __throw_concurrence_unlock_error();
        }
    }

    __gthread_recursive_mutex_t* gthread_recursive_mutex(void)
    { return &_M_mutex; }
  };
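  // Illustrative sketch, not part of the original header: unlike
  // __mutex, a __recursive_mutex may be re-acquired by the thread
  // already holding it, provided every lock() is matched by unlock():
  //
  //   __gnu_cxx::__recursive_mutex __rm;   // hypothetical object
  //   __rm.lock();
  //   __rm.lock();     // same thread, does not deadlock
  //   __rm.unlock();
  //   __rm.unlock();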
  /// Scoped lock idiom: acquire the mutex in the constructor, release
  /// it in the destructor, in accordance with RAII style.
  class __scoped_lock
  {
  public:
    typedef __mutex __mutex_type;

  private:
    __mutex_type& _M_device;

    __scoped_lock(const __scoped_lock&);
    __scoped_lock& operator=(const __scoped_lock&);

  public:
    explicit __scoped_lock(__mutex_type& __name) : _M_device(__name)
    { _M_device.lock(); }

    ~__scoped_lock() throw()
    { _M_device.unlock(); }
  };
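  // Illustrative sketch, not part of the original header: guarding a
  // critical section with __scoped_lock releases the mutex on every
  // exit path, including exceptions thrown from the guarded code:
  //
  //   __gnu_cxx::__mutex __shared_mutex;            // hypothetical object
  //   void __update_shared_state()                  // hypothetical function
  //   {
  //     __gnu_cxx::__scoped_lock __l(__shared_mutex);  // lock() here
  //     // ... modify data protected by __shared_mutex ...
  //   }                                      // unlock() in ~__scoped_lock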
#ifdef __GTHREAD_HAS_COND
  /// Condition variable wrapper over the gthreads condition; non-copyable.
  class __cond
  {
  private:
    __gthread_cond_t _M_cond;

    __cond(const __cond&);
    __cond& operator=(const __cond&);

  public:
    __cond()
    {
      if (__gthread_active_p())
        {
#if defined __GTHREAD_COND_INIT
          __gthread_cond_t __tmp = __GTHREAD_COND_INIT;
          _M_cond = __tmp;
#else
          __GTHREAD_COND_INIT_FUNCTION(&_M_cond);
#endif
        }
    }

    void broadcast()
    {
      if (__gthread_active_p())
        {
          if (__gthread_cond_broadcast(&_M_cond) != 0)
            __throw_concurrence_broadcast_error();
        }
    }

    void wait(__mutex* mutex)
    {
      if (__gthread_cond_wait(&_M_cond, mutex->gthread_mutex()) != 0)
        __throw_concurrence_wait_error();
    }

    void wait_recursive(__recursive_mutex* mutex)
    {
      if (__gthread_cond_wait_recursive(&_M_cond,
                                        mutex->gthread_recursive_mutex()) != 0)
        __throw_concurrence_wait_error();
    }
  };
#endif
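  // Illustrative sketch, not part of the original header: the usual
  // pattern holds the mutex, re-tests the predicate around wait(), and
  // broadcasts after changing it (all names below are hypothetical):
  //
  //   __gnu_cxx::__mutex __mtx;
  //   __gnu_cxx::__cond  __cv;
  //   bool __ready = false;
  //
  //   void __consumer()
  //   {
  //     __mtx.lock();
  //     while (!__ready)
  //       __cv.wait(&__mtx);   // atomically releases and re-acquires __mtx
  //     __mtx.unlock();
  //   }
  //
  //   void __producer()
  //   {
  //     __mtx.lock();
  //     __ready = true;
  //     __mtx.unlock();
  //     __cv.broadcast();
  //   }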
_GLIBCXX_END_NAMESPACE

#endif