/* linux-2.6: include/asm-mips/atomic.h (commit 6423ffa195a409f59b3d6a6881d1b1b866c29dbe) */
1 /*
2  * Atomic operations that C can't guarantee us.  Useful for
3  * resource counting etc..
4  *
5  * But use these as seldom as possible since they are much more slower
6  * than regular operations.
7  *
8  * This file is subject to the terms and conditions of the GNU General Public
9  * License.  See the file "COPYING" in the main directory of this archive
10  * for more details.
11  *
12  * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
13  */
14 #ifndef _ASM_ATOMIC_H
15 #define _ASM_ATOMIC_H
16
17 #include <linux/irqflags.h>
18 #include <asm/barrier.h>
19 #include <asm/cpu-features.h>
20 #include <asm/war.h>
21
/*
 * Counter is volatile so the plain-C read/set below always touch memory;
 * all real atomicity comes from the ll/sc sequences further down.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  No memory barrier is implied.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  No memory barrier is implied.
 */
#define atomic_set(v,i)         ((v)->counter = (i))
42
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.  No memory barrier is implied (only the
 * *_return variants are serializing -- see the note at the end of this
 * file).
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                /*
                 * R10000 workaround: the retry uses branch-likely
                 * (beqzl), which is what this erratum requires.
                 */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                /*
                 * Generic ll/sc: the unlikely sc-failure retry is moved
                 * out of line (.subsection 2) to keep the hot path short.
                 */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                /* No ll/sc on this CPU: fall back to an irq-off section. */
                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
87
/*
 * atomic_sub - subtract the atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.  No memory barrier is implied.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                /* R10000 erratum workaround: branch-likely retry loop. */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                /* Generic ll/sc with the retry branch moved out of line. */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                /* No ll/sc: irq-off critical section. */
                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}
132
/*
 * Same as above, but return the result value.
 *
 * The *_return variants are fully serializing: smp_mb() is issued both
 * before and after the update.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                /*
                 * sc clobbers %0 with the success flag, so the new value
                 * is recomputed into %0 after the loop exits.
                 */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                /* No ll/sc: read-modify-write under irq-off. */
                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
187
/*
 * atomic_sub_return - subtract @i from @v and return the new value.
 * Fully serializing (smp_mb() before and after).
 */
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                /*
                 * sc clobbers %0 with the success flag; the trailing subu
                 * recomputes the new value for the return.
                 */
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                /* No ll/sc: read-modify-write under irq-off. */
                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}
239
240 /*
241  * atomic_sub_if_positive - conditionally subtract integer from atomic variable
242  * @i: integer value to subtract
243  * @v: pointer of type atomic_t
244  *
245  * Atomically test @v and subtract @i if @v is greater or equal than @i.
246  * The function returns the old value of @v minus @i.
247  */
248 static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
249 {
250         unsigned long result;
251
252         smp_mb();
253
254         if (cpu_has_llsc && R10000_LLSC_WAR) {
255                 unsigned long temp;
256
257                 __asm__ __volatile__(
258                 "       .set    mips3                                   \n"
259                 "1:     ll      %1, %2          # atomic_sub_if_positive\n"
260                 "       subu    %0, %1, %3                              \n"
261                 "       bltz    %0, 1f                                  \n"
262                 "       sc      %0, %2                                  \n"
263                 "       .set    noreorder                               \n"
264                 "       beqzl   %0, 1b                                  \n"
265                 "        subu   %0, %1, %3                              \n"
266                 "       .set    reorder                                 \n"
267                 "1:                                                     \n"
268                 "       .set    mips0                                   \n"
269                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
270                 : "Ir" (i), "m" (v->counter)
271                 : "memory");
272         } else if (cpu_has_llsc) {
273                 unsigned long temp;
274
275                 __asm__ __volatile__(
276                 "       .set    mips3                                   \n"
277                 "1:     ll      %1, %2          # atomic_sub_if_positive\n"
278                 "       subu    %0, %1, %3                              \n"
279                 "       bltz    %0, 1f                                  \n"
280                 "       sc      %0, %2                                  \n"
281                 "       .set    noreorder                               \n"
282                 "       beqz    %0, 2f                                  \n"
283                 "        subu   %0, %1, %3                              \n"
284                 "       .set    reorder                                 \n"
285                 "1:                                                     \n"
286                 "       .subsection 2                                   \n"
287                 "2:     b       1b                                      \n"
288                 "       .previous                                       \n"
289                 "       .set    mips0                                   \n"
290                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
291                 : "Ir" (i), "m" (v->counter)
292                 : "memory");
293         } else {
294                 unsigned long flags;
295
296                 raw_local_irq_save(flags);
297                 result = v->counter;
298                 result -= i;
299                 if (result >= 0)
300                         v->counter = result;
301                 raw_local_irq_restore(flags);
302         }
303
304         smp_mb();
305
306         return result;
307 }
308
/* cmpxchg/xchg on the embedded counter; atomicity comes from asm/cmpxchg. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 *
 * Classic cmpxchg loop: re-read on contention (old != c) and retry
 * until either the add succeeds or the value hits @u.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        __typeof__((v)->counter) c, old;                        \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
330
/* All of the following are built on the serializing *_return primitives. */
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.  Not a barrier.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.  Not a barrier.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
397
#ifdef CONFIG_64BIT

/* 64-bit counterpart of atomic_t; only available on 64-bit kernels. */
typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * No memory barrier is implied.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))
417
418 /*
419  * atomic64_add - add integer to atomic variable
420  * @i: integer value to add
421  * @v: pointer of type atomic64_t
422  *
423  * Atomically adds @i to @v.
424  */
425 static __inline__ void atomic64_add(long i, atomic64_t * v)
426 {
427         if (cpu_has_llsc && R10000_LLSC_WAR) {
428                 unsigned long temp;
429
430                 __asm__ __volatile__(
431                 "       .set    mips3                                   \n"
432                 "1:     lld     %0, %1          # atomic64_add          \n"
433                 "       addu    %0, %2                                  \n"
434                 "       scd     %0, %1                                  \n"
435                 "       beqzl   %0, 1b                                  \n"
436                 "       .set    mips0                                   \n"
437                 : "=&r" (temp), "=m" (v->counter)
438                 : "Ir" (i), "m" (v->counter));
439         } else if (cpu_has_llsc) {
440                 unsigned long temp;
441
442                 __asm__ __volatile__(
443                 "       .set    mips3                                   \n"
444                 "1:     lld     %0, %1          # atomic64_add          \n"
445                 "       addu    %0, %2                                  \n"
446                 "       scd     %0, %1                                  \n"
447                 "       beqz    %0, 2f                                  \n"
448                 "       .subsection 2                                   \n"
449                 "2:     b       1b                                      \n"
450                 "       .previous                                       \n"
451                 "       .set    mips0                                   \n"
452                 : "=&r" (temp), "=m" (v->counter)
453                 : "Ir" (i), "m" (v->counter));
454         } else {
455                 unsigned long flags;
456
457                 raw_local_irq_save(flags);
458                 v->counter += i;
459                 raw_local_irq_restore(flags);
460         }
461 }
462
463 /*
464  * atomic64_sub - subtract the atomic variable
465  * @i: integer value to subtract
466  * @v: pointer of type atomic64_t
467  *
468  * Atomically subtracts @i from @v.
469  */
470 static __inline__ void atomic64_sub(long i, atomic64_t * v)
471 {
472         if (cpu_has_llsc && R10000_LLSC_WAR) {
473                 unsigned long temp;
474
475                 __asm__ __volatile__(
476                 "       .set    mips3                                   \n"
477                 "1:     lld     %0, %1          # atomic64_sub          \n"
478                 "       subu    %0, %2                                  \n"
479                 "       scd     %0, %1                                  \n"
480                 "       beqzl   %0, 1b                                  \n"
481                 "       .set    mips0                                   \n"
482                 : "=&r" (temp), "=m" (v->counter)
483                 : "Ir" (i), "m" (v->counter));
484         } else if (cpu_has_llsc) {
485                 unsigned long temp;
486
487                 __asm__ __volatile__(
488                 "       .set    mips3                                   \n"
489                 "1:     lld     %0, %1          # atomic64_sub          \n"
490                 "       subu    %0, %2                                  \n"
491                 "       scd     %0, %1                                  \n"
492                 "       beqz    %0, 2f                                  \n"
493                 "       .subsection 2                                   \n"
494                 "2:     b       1b                                      \n"
495                 "       .previous                                       \n"
496                 "       .set    mips0                                   \n"
497                 : "=&r" (temp), "=m" (v->counter)
498                 : "Ir" (i), "m" (v->counter));
499         } else {
500                 unsigned long flags;
501
502                 raw_local_irq_save(flags);
503                 v->counter -= i;
504                 raw_local_irq_restore(flags);
505         }
506 }
507
508 /*
509  * Same as above, but return the result value
510  */
511 static __inline__ long atomic64_add_return(long i, atomic64_t * v)
512 {
513         unsigned long result;
514
515         smp_mb();
516
517         if (cpu_has_llsc && R10000_LLSC_WAR) {
518                 unsigned long temp;
519
520                 __asm__ __volatile__(
521                 "       .set    mips3                                   \n"
522                 "1:     lld     %1, %2          # atomic64_add_return   \n"
523                 "       addu    %0, %1, %3                              \n"
524                 "       scd     %0, %2                                  \n"
525                 "       beqzl   %0, 1b                                  \n"
526                 "       addu    %0, %1, %3                              \n"
527                 "       .set    mips0                                   \n"
528                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
529                 : "Ir" (i), "m" (v->counter)
530                 : "memory");
531         } else if (cpu_has_llsc) {
532                 unsigned long temp;
533
534                 __asm__ __volatile__(
535                 "       .set    mips3                                   \n"
536                 "1:     lld     %1, %2          # atomic64_add_return   \n"
537                 "       addu    %0, %1, %3                              \n"
538                 "       scd     %0, %2                                  \n"
539                 "       beqz    %0, 2f                                  \n"
540                 "       addu    %0, %1, %3                              \n"
541                 "       .subsection 2                                   \n"
542                 "2:     b       1b                                      \n"
543                 "       .previous                                       \n"
544                 "       .set    mips0                                   \n"
545                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
546                 : "Ir" (i), "m" (v->counter)
547                 : "memory");
548         } else {
549                 unsigned long flags;
550
551                 raw_local_irq_save(flags);
552                 result = v->counter;
553                 result += i;
554                 v->counter = result;
555                 raw_local_irq_restore(flags);
556         }
557
558         smp_mb();
559
560         return result;
561 }
562
563 static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
564 {
565         unsigned long result;
566
567         smp_mb();
568
569         if (cpu_has_llsc && R10000_LLSC_WAR) {
570                 unsigned long temp;
571
572                 __asm__ __volatile__(
573                 "       .set    mips3                                   \n"
574                 "1:     lld     %1, %2          # atomic64_sub_return   \n"
575                 "       subu    %0, %1, %3                              \n"
576                 "       scd     %0, %2                                  \n"
577                 "       beqzl   %0, 1b                                  \n"
578                 "       subu    %0, %1, %3                              \n"
579                 "       .set    mips0                                   \n"
580                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
581                 : "Ir" (i), "m" (v->counter)
582                 : "memory");
583         } else if (cpu_has_llsc) {
584                 unsigned long temp;
585
586                 __asm__ __volatile__(
587                 "       .set    mips3                                   \n"
588                 "1:     lld     %1, %2          # atomic64_sub_return   \n"
589                 "       subu    %0, %1, %3                              \n"
590                 "       scd     %0, %2                                  \n"
591                 "       beqz    %0, 2f                                  \n"
592                 "       subu    %0, %1, %3                              \n"
593                 "       .subsection 2                                   \n"
594                 "2:     b       1b                                      \n"
595                 "       .previous                                       \n"
596                 "       .set    mips0                                   \n"
597                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
598                 : "Ir" (i), "m" (v->counter)
599                 : "memory");
600         } else {
601                 unsigned long flags;
602
603                 raw_local_irq_save(flags);
604                 result = v->counter;
605                 result -= i;
606                 v->counter = result;
607                 raw_local_irq_restore(flags);
608         }
609
610         smp_mb();
611
612         return result;
613 }
614
615 /*
616  * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
617  * @i: integer value to subtract
618  * @v: pointer of type atomic64_t
619  *
620  * Atomically test @v and subtract @i if @v is greater or equal than @i.
621  * The function returns the old value of @v minus @i.
622  */
623 static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
624 {
625         unsigned long result;
626
627         smp_mb();
628
629         if (cpu_has_llsc && R10000_LLSC_WAR) {
630                 unsigned long temp;
631
632                 __asm__ __volatile__(
633                 "       .set    mips3                                   \n"
634                 "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
635                 "       dsubu   %0, %1, %3                              \n"
636                 "       bltz    %0, 1f                                  \n"
637                 "       scd     %0, %2                                  \n"
638                 "       .set    noreorder                               \n"
639                 "       beqzl   %0, 1b                                  \n"
640                 "        dsubu  %0, %1, %3                              \n"
641                 "       .set    reorder                                 \n"
642                 "1:                                                     \n"
643                 "       .set    mips0                                   \n"
644                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
645                 : "Ir" (i), "m" (v->counter)
646                 : "memory");
647         } else if (cpu_has_llsc) {
648                 unsigned long temp;
649
650                 __asm__ __volatile__(
651                 "       .set    mips3                                   \n"
652                 "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
653                 "       dsubu   %0, %1, %3                              \n"
654                 "       bltz    %0, 1f                                  \n"
655                 "       scd     %0, %2                                  \n"
656                 "       .set    noreorder                               \n"
657                 "       beqz    %0, 2f                                  \n"
658                 "        dsubu  %0, %1, %3                              \n"
659                 "       .set    reorder                                 \n"
660                 "1:                                                     \n"
661                 "       .subsection 2                                   \n"
662                 "2:     b       1b                                      \n"
663                 "       .previous                                       \n"
664                 "       .set    mips0                                   \n"
665                 : "=&r" (result), "=&r" (temp), "=m" (v->counter)
666                 : "Ir" (i), "m" (v->counter)
667                 : "memory");
668         } else {
669                 unsigned long flags;
670
671                 raw_local_irq_save(flags);
672                 result = v->counter;
673                 result -= i;
674                 if (result >= 0)
675                         v->counter = result;
676                 raw_local_irq_restore(flags);
677         }
678
679         smp_mb();
680
681         return result;
682 }
683
/* 64-bit cmpxchg/xchg on the embedded counter. */
#define atomic64_cmpxchg(v, o, n) \
        (((__typeof__((v)->counter)))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 *
 * FIX: the initial read must use atomic64_read(), not atomic_read();
 * the 32-bit accessor happened to work only because both are macros
 * expanding to (v)->counter, and it was wrong for the 64-bit type.
 */
#define atomic64_add_unless(v, a, u)                            \
({                                                              \
        __typeof__((v)->counter) c, old;                        \
        c = atomic64_read(v);                                   \
        while (c != (u) && (old = atomic64_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
706
/* All of the following are built on the serializing *_return primitives. */
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.  Not a barrier.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.  Not a barrier.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
773
#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions, so callers needing ordering around plain inc/dec must use
 * these explicit barriers.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
784
785 #include <asm-generic/atomic.h>
786 #endif /* _ASM_ATOMIC_H */