/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/interrupt.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

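/*
 * Usage sketch (illustrative only, not part of this header; the variable
 * name is hypothetical): ATOMIC_INIT initializes an atomic_t at definition
 * time, atomic_set does the same at run time.
 *
 *      static atomic_t nr_widgets = ATOMIC_INIT(0);
 *
 *      atomic_set(&nr_widgets, 42);
 *      printk("%d\n", atomic_read(&nr_widgets));
 */
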
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

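/*
 * Usage sketch (illustrative only; the counter name is hypothetical):
 * atomic_add/atomic_sub update the counter without locking but return
 * nothing, so they suit statistics counters that are only read
 * approximately.
 *
 *      static atomic_t rx_packets = ATOMIC_INIT(0);
 *
 *      atomic_add(nr_frames, &rx_packets);     /* from the rx path *\/
 */
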
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

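/*
 * Usage sketch (illustrative only; the names are hypothetical): unlike
 * atomic_add, atomic_add_return hands back the new value, so it can
 * allocate unique, monotonically increasing ids without a lock.
 *
 *      static atomic_t next_id = ATOMIC_INIT(0);
 *
 *      int alloc_id(void)
 *      {
 *              return atomic_add_return(1, &next_id);
 *      }
 */
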
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       move    %1, %0                                  \n"
                "       sc      %1, %2                                  \n"
                "       beqzl   %1, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       move    %1, %0                                  \n"
                "       sc      %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

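/*
 * Usage sketch (illustrative only; the semaphore-like counter is
 * hypothetical): the subtraction is only committed when the result stays
 * non-negative, so a negative return value means the resource was not
 * taken.
 *
 *      static atomic_t free_slots = ATOMIC_INIT(8);
 *
 *      if (atomic_sub_if_positive(1, &free_slots) < 0)
 *              return -EBUSY;          /* no slot taken *\/
 */
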
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

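/*
 * Usage sketch (illustrative only): the canonical atomic_cmpxchg retry
 * loop reads the old value, computes a new one, and retries until no
 * other CPU raced in between.  atomic_add_unless() below is built
 * exactly this way.
 *
 *      static atomic_t val = ATOMIC_INIT(1);
 *      int old, new;
 *
 *      do {
 *              old = atomic_read(&val);
 *              new = old * 2;          /* any pure function of old *\/
 *      } while (atomic_cmpxchg(&val, old, new) != old);
 */
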
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

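/*
 * Usage sketch (illustrative only; "struct foo" and lookup_foo() are
 * hypothetical): atomic_inc_not_zero is the usual way to take a reference
 * to an object whose refcount of zero means "being freed" - the lookup
 * fails instead of resurrecting a dying object.
 *
 *      struct foo *f = lookup_foo(id);
 *
 *      if (f && !atomic_inc_not_zero(&f->refcount))
 *              f = NULL;               /* lost the race with free *\/
 */
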
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

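/*
 * Usage sketch (illustrative only; "struct foo" is hypothetical): the
 * classic put side of a reference count - exactly one caller sees the
 * count hit zero and becomes responsible for freeing the object.
 *
 *      void foo_put(struct foo *f)
 *      {
 *              if (atomic_dec_and_test(&f->refcount))
 *                      kfree(f);
 *      }
 */
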
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       move    %1, %0                                  \n"
                "       scd     %1, %2                                  \n"
                "       beqzl   %1, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       move    %1, %0                                  \n"
                "       scd     %1, %2                                  \n"
                "       beqz    %1, 1b                                  \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

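/*
 * Usage sketch (illustrative only; "obj" and DEAD are hypothetical):
 * because plain atomic_dec() does not serialize, code that needs its
 * earlier stores visible before the decrement must add the barrier
 * explicitly:
 *
 *      obj->state = DEAD;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&obj->refcount);
 */
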
#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */