/*
 * spinlock.h - spinlock abstraction with three selectable backends
 * (C11 atomics / GCC __sync builtins / pthread mutex).
 */
#ifndef SKYNET_SPINLOCK_H
#define SKYNET_SPINLOCK_H

// Convenience wrappers: operate on any struct that embeds a
// `struct spinlock` member named `lock` (usage: SPIN_LOCK(q)).
// NOTE(review): each macro carries a trailing ';', so existing call sites
// may rely on omitting their own semicolon; do not remove it here.
#define SPIN_INIT(q) spinlock_init(&(q)->lock);
#define SPIN_LOCK(q) spinlock_lock(&(q)->lock);
#define SPIN_UNLOCK(q) spinlock_unlock(&(q)->lock);
#define SPIN_DESTROY(q) spinlock_destroy(&(q)->lock);
// Backend selection (compile-time):
//   default             -> C11 <stdatomic.h> spinlock
//   __STDC_NO_ATOMICS__ -> GCC/Clang __sync builtin fallback
//   USE_PTHREAD_LOCK    -> pthread mutex instead of a spinlock
#ifndef USE_PTHREAD_LOCK
#ifdef __STDC_NO_ATOMICS__
  9. #define atomic_flag_ int
  10. #define ATOMIC_FLAG_INIT_ 0
  11. #define atomic_flag_test_and_set_(ptr) __sync_lock_test_and_set(ptr, 1)
  12. #define atomic_flag_clear_(ptr) __sync_lock_release(ptr)
  13. struct spinlock {
  14. atomic_flag_ lock;
  15. };
  16. static inline void
  17. spinlock_init(struct spinlock *lock) {
  18. atomic_flag_ v = ATOMIC_FLAG_INIT_;
  19. lock->lock = v;
  20. }
  21. static inline void
  22. spinlock_lock(struct spinlock *lock) {
  23. while (atomic_flag_test_and_set_(&lock->lock)) {}
  24. }
  25. static inline int
  26. spinlock_trylock(struct spinlock *lock) {
  27. return atomic_flag_test_and_set_(&lock->lock) == 0;
  28. }
  29. static inline void
  30. spinlock_unlock(struct spinlock *lock) {
  31. atomic_flag_clear_(&lock->lock);
  32. }
  33. static inline void
  34. spinlock_destroy(struct spinlock *lock) {
  35. (void) lock;
  36. }
  37. #else // __STDC_NO_ATOMICS__
  38. #include "atomic.h"
  39. #define atomic_test_and_set_(ptr) STD_ atomic_exchange_explicit(ptr, 1, STD_ memory_order_acquire)
  40. #define atomic_clear_(ptr) STD_ atomic_store_explicit(ptr, 0, STD_ memory_order_release);
  41. #define atomic_load_relaxed_(ptr) STD_ atomic_load_explicit(ptr, STD_ memory_order_relaxed)
  42. #if defined(__x86_64__)
  43. #include <immintrin.h> // For _mm_pause
  44. #define atomic_pause_() _mm_pause()
  45. #else
  46. #define atomic_pause_() ((void)0)
  47. #endif
  48. struct spinlock {
  49. STD_ atomic_int lock;
  50. };
  51. static inline void
  52. spinlock_init(struct spinlock *lock) {
  53. STD_ atomic_init(&lock->lock, 0);
  54. }
  55. static inline void
  56. spinlock_lock(struct spinlock *lock) {
  57. for (;;) {
  58. if (!atomic_test_and_set_(&lock->lock))
  59. return;
  60. while (atomic_load_relaxed_(&lock->lock))
  61. atomic_pause_();
  62. }
  63. }
  64. static inline int
  65. spinlock_trylock(struct spinlock *lock) {
  66. return !atomic_load_relaxed_(&lock->lock) &&
  67. !atomic_test_and_set_(&lock->lock);
  68. }
  69. static inline void
  70. spinlock_unlock(struct spinlock *lock) {
  71. atomic_clear_(&lock->lock);
  72. }
  73. static inline void
  74. spinlock_destroy(struct spinlock *lock) {
  75. (void) lock;
  76. }
#endif // __STDC_NO_ATOMICS__
#else // USE_PTHREAD_LOCK
  79. #include <pthread.h>
  80. // we use mutex instead of spinlock for some reason
  81. // you can also replace to pthread_spinlock
  82. struct spinlock {
  83. pthread_mutex_t lock;
  84. };
  85. static inline void
  86. spinlock_init(struct spinlock *lock) {
  87. pthread_mutex_init(&lock->lock, NULL);
  88. }
  89. static inline void
  90. spinlock_lock(struct spinlock *lock) {
  91. pthread_mutex_lock(&lock->lock);
  92. }
  93. static inline int
  94. spinlock_trylock(struct spinlock *lock) {
  95. return pthread_mutex_trylock(&lock->lock) == 0;
  96. }
  97. static inline void
  98. spinlock_unlock(struct spinlock *lock) {
  99. pthread_mutex_unlock(&lock->lock);
  100. }
  101. static inline void
  102. spinlock_destroy(struct spinlock *lock) {
  103. pthread_mutex_destroy(&lock->lock);
  104. }
#endif // USE_PTHREAD_LOCK
#endif // SKYNET_SPINLOCK_H