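/*
 * Wrappers that route the generic atomic_t/atomic64_t API (and the
 * cmpxchg() family) to the arch_-prefixed implementations supplied by the
 * architecture. Funnelling every operation through this one header gives a
 * single place where instrumentation (e.g. sanitizer checks) can be hooked
 * in, as the file name suggests.
 */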
#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

static __always_inline int atomic_read(const atomic_t *v)
{
	return arch_atomic_read(v);
}

static __always_inline long long atomic64_read(const atomic64_t *v)
{
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, long long i)
{
	arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	return arch_atomic_xchg(v, i);
}

static __always_inline long long atomic64_xchg(atomic64_t *v, long long i)
{
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline long long atomic64_cmpxchg(atomic64_t *v, long long old,
						   long long new)
{
	return arch_atomic64_cmpxchg(v, old, new);
}

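/*
 * try_cmpxchg() is optional: the wrappers below are only provided when the
 * architecture defines the corresponding arch_ variant, and the
 * self-referential #define advertises their presence to generic code.
 */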
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, long long *old,
						 long long new)
{
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif

static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	return __arch_atomic_add_unless(v, a, u);
}

static __always_inline bool atomic64_add_unless(atomic64_t *v, long long a,
						long long u)
{
	return arch_atomic64_add_unless(v, a, u);
}

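/* Non-value-returning (void) arithmetic and bitwise operations. */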
static __always_inline void atomic_inc(atomic_t *v)
{
	arch_atomic_inc(v);
}

static __always_inline void atomic64_inc(atomic64_t *v)
{
	arch_atomic64_inc(v);
}

static __always_inline void atomic_dec(atomic_t *v)
{
	arch_atomic_dec(v);
}

static __always_inline void atomic64_dec(atomic64_t *v)
{
	arch_atomic64_dec(v);
}

static __always_inline void atomic_add(int i, atomic_t *v)
{
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(long long i, atomic64_t *v)
{
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(long long i, atomic64_t *v)
{
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(long long i, atomic64_t *v)
{
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(long long i, atomic64_t *v)
{
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(long long i, atomic64_t *v)
{
	arch_atomic64_xor(i, v);
}

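/* Value-returning and test variants. */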
static __always_inline int atomic_inc_return(atomic_t *v)
{
	return arch_atomic_inc_return(v);
}

static __always_inline long long atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_inc_return(v);
}

static __always_inline int atomic_dec_return(atomic_t *v)
{
	return arch_atomic_dec_return(v);
}

static __always_inline long long atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_dec_return(v);
}

static __always_inline long long atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline long long atomic64_dec_if_positive(atomic64_t *v)
{
	return arch_atomic64_dec_if_positive(v);
}

static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_and_test(v);
}

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v);
}

static __always_inline long long atomic64_add_return(long long i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v);
}

static __always_inline long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v);
}

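/* fetch_* operations return the value the variable held before the update. */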
static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	return arch_atomic_fetch_add(i, v);
}

static __always_inline long long atomic64_fetch_add(long long i, atomic64_t *v)
{
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline long long atomic64_fetch_sub(long long i, atomic64_t *v)
{
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(i, v);
}

static __always_inline long long atomic64_fetch_and(long long i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	return arch_atomic_fetch_or(i, v);
}

static __always_inline long long atomic64_fetch_or(long long i, atomic64_t *v)
{
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline long long atomic64_fetch_xor(long long i, atomic64_t *v)
{
	return arch_atomic64_fetch_xor(i, v);
}

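/*
 * Predicates on the updated value: *_sub_and_test() reports whether the
 * result is zero, *_add_negative() whether it is negative.
 */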
static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool atomic64_sub_and_test(long long i, atomic64_t *v)
{
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_negative(i, v);
}

static __always_inline bool atomic64_add_negative(long long i, atomic64_t *v)
{
	return arch_atomic64_add_negative(i, v);
}

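/*
 * The cmpxchg() family operates on pointers to values of several sizes, so
 * these wrappers are macros rather than typed functions.
 */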
#define cmpxchg(ptr, old, new) \
({ \
	arch_cmpxchg((ptr), (old), (new)); \
})

#define sync_cmpxchg(ptr, old, new) \
({ \
	arch_sync_cmpxchg((ptr), (old), (new)); \
})

#define cmpxchg_local(ptr, old, new) \
({ \
	arch_cmpxchg_local((ptr), (old), (new)); \
})

#define cmpxchg64(ptr, old, new) \
({ \
	arch_cmpxchg64((ptr), (old), (new)); \
})

#define cmpxchg64_local(ptr, old, new) \
({ \
	arch_cmpxchg64_local((ptr), (old), (new)); \
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2) \
({ \
	arch_cmpxchg_double((p1), (p2), (o1), (o2), (n1), (n2)); \
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
({ \
	arch_cmpxchg_double_local((p1), (p2), (o1), (o2), (n1), (n2)); \
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
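
/*
 * Intended usage (illustrative sketch only, not part of this header): an
 * architecture defines the arch_-prefixed operations and then includes this
 * file so that the generic names above resolve to these wrappers, e.g.:
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *	...
 *	#include <linux/atomic-instrumented.h>
 *
 * The include path and the arch_atomic_read() body shown here are
 * hypothetical; they depend on how a particular architecture wires up its
 * atomic headers.
 */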