/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 1994 - 1997, 99, 2000, 06, 07  Ralf Baechle (ralf@linux-mips.org)
 * Copyright (c) 1999, 2000  Silicon Graphics, Inc.
 */
#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/bug.h>
#include <asm/byteorder.h>              /* sigh ... */
#include <asm/cpu-features.h>
#include <asm/sgidefs.h>
#include <asm/war.h>

#if (_MIPS_SZLONG == 32)
#define SZLONG_LOG 5
#define SZLONG_MASK 31UL
#define __LL            "ll     "
#define __SC            "sc     "
#define __INS           "ins    "
#define __EXT           "ext    "
#elif (_MIPS_SZLONG == 64)
#define SZLONG_LOG 6
#define SZLONG_MASK 63UL
#define __LL            "lld    "
#define __SC            "scd    "
#define __INS           "dins   "
#define __EXT           "dext   "
#endif

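/*
 * Illustrative note (not part of the original header): the helpers below
 * split a bit number into a word index and a bit offset using SZLONG_LOG
 * and SZLONG_MASK.  As a worked example on a 32-bit kernel (SZLONG_LOG == 5,
 * SZLONG_MASK == 31):
 *
 *      nr = 37
 *      word index = nr >> SZLONG_LOG  = 37 >> 5 = 1
 *      bit offset = nr & SZLONG_MASK  = 37 & 31 = 5
 *      mask       = 1UL << 5          = 0x20
 *
 * so bit 37 of a bitmap lives in addr[1] as bit 5.
 */
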
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()      smp_mb()
#define smp_mb__after_clear_bit()       smp_mb()

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        unsigned long temp;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(nr)) {
                __asm__ __volatile__(
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       " __INS "%0, %4, %2, 1                          \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (nr & SZLONG_MASK), "m" (*m), "r" (~0));
#endif /* CONFIG_CPU_MIPSR2 */
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # set_bit       \n"
                "       or      %0, %2                                  \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                *a |= mask;
                local_irq_restore(flags);
        }
}

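/*
 * Hedged usage sketch (illustrative only; the flag name and variable below
 * are assumptions, not part of this header).  Callers typically keep flags
 * in an unsigned long (or an array of them) and index bits by number:
 *
 *      static unsigned long my_flags;
 *      #define MY_FLAG_BUSY    0
 *
 *      set_bit(MY_FLAG_BUSY, &my_flags);       // atomic, no implied barrier
 *      ...
 *      clear_bit(MY_FLAG_BUSY, &my_flags);
 */
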
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_clear_bit() and/or smp_mb__after_clear_bit()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
        unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
        unsigned long temp;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(nr)) {
                __asm__ __volatile__(
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       " __INS "%0, $0, %2, 1                          \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (nr & SZLONG_MASK), "m" (*m));
#endif /* CONFIG_CPU_MIPSR2 */
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1                  # clear_bit     \n"
                "       and     %0, %2                                  \n"
                "       " __SC "%0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (~(1UL << (nr & SZLONG_MASK))), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                *a &= ~mask;
                local_irq_restore(flags);
        }
}

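/*
 * Hedged example (illustrative; reuses the assumed names from the earlier
 * sketch): clear_bit() has no implied barrier, so code that uses a bit as
 * an ownership/lock-like flag brackets the clear with the helpers defined
 * near the top of this file.
 *
 *      smp_mb__before_clear_bit();             // order prior stores first
 *      clear_bit(MY_FLAG_BUSY, &my_flags);
 *      smp_mb__after_clear_bit();
 */
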
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqzl   %0, 1b                          \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                           \n"
                "1:     " __LL "%0, %1          # change_bit    \n"
                "       xor     %0, %2                          \n"
                "       " __SC  "%0, %1                         \n"
                "       beqz    %0, 2f                          \n"
                "       .subsection 2                           \n"
                "2:     b       1b                              \n"
                "       .previous                               \n"
                "       .set    mips0                           \n"
                : "=&r" (temp), "=m" (*m)
                : "ir" (1UL << (nr & SZLONG_MASK)), "m" (*m));
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                *a ^= mask;
                local_irq_restore(flags);
        }
}

/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long res;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL "%0, %1          # test_and_set_bit      \n"
                "       or      %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                res = (mask & *a);
                *a |= mask;
                local_irq_restore(flags);
        }

        /*
         * test_and_set_bit() implies a memory barrier; issue it before
         * the old bit value is returned.
         */
        smp_mb();

        return res != 0;
}

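/*
 * Hedged usage sketch (illustrative; MY_FLAG_BUSY/my_flags are assumed
 * names from the earlier sketch): a "claim" pattern where only the caller
 * that flips the bit from 0 to 1 proceeds, relying on the implied barrier
 * for ordering.
 *
 *      if (test_and_set_bit(MY_FLAG_BUSY, &my_flags))
 *              return -EBUSY;                  // already claimed elsewhere
 *      ... do the protected work ...
 *      smp_mb__before_clear_bit();
 *      clear_bit(MY_FLAG_BUSY, &my_flags);
 */
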
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long res;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
#ifdef CONFIG_CPU_MIPSR2
        } else if (__builtin_constant_p(nr)) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       " __EXT "%2, %0, %3, 1                          \n"
                "       " __INS "%0, $0, %3, 1                          \n"
                "       " __SC  "%0, %1                                 \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "ri" (nr & SZLONG_MASK), "m" (*m)
                : "memory");
#endif /* CONFIG_CPU_MIPSR2 */
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_clear_bit    \n"
                "       or      %2, %0, %3                              \n"
                "       xor     %2, %3                                  \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                res = (mask & *a);
                *a &= ~mask;
                local_irq_restore(flags);
        }

        /*
         * test_and_clear_bit() implies a memory barrier; issue it before
         * the old bit value is returned.
         */
        smp_mb();

        return res != 0;
}

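/*
 * Hedged example (illustrative; MY_FLAG_PENDING/my_flags are assumed
 * names): consuming a "work pending" flag exactly once.
 *
 *      if (test_and_clear_bit(MY_FLAG_PENDING, &my_flags))
 *              process_pending_work();         // runs only if the bit was set
 */
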
/*
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(unsigned long nr,
        volatile unsigned long *addr)
{
        unsigned long res;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqzl   %2, 1b                                  \n"
                "       and     %2, %0, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    push                                    \n"
                "       .set    noreorder                               \n"
                "       .set    mips3                                   \n"
                "1:     " __LL  "%0, %1         # test_and_change_bit   \n"
                "       xor     %2, %0, %3                              \n"
                "       " __SC  "%2, %1                                 \n"
                "       beqz    %2, 2f                                  \n"
                "        and    %2, %0, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "        nop                                            \n"
                "       .previous                                       \n"
                "       .set    pop                                     \n"
                : "=&r" (temp), "=m" (*m), "=&r" (res)
                : "r" (1UL << (nr & SZLONG_MASK)), "m" (*m)
                : "memory");
        } else {
                volatile unsigned long *a = addr;
                unsigned long mask;
                unsigned long flags;

                a += nr >> SZLONG_LOG;
                mask = 1UL << (nr & SZLONG_MASK);
                local_irq_save(flags);
                res = (mask & *a);
                *a ^= mask;
                local_irq_restore(flags);
        }

        /*
         * test_and_change_bit() implies a memory barrier; issue it before
         * the old bit value is returned.
         */
        smp_mb();

        return res != 0;
}

#include <asm-generic/bitops/non-atomic.h>

/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 */
static inline int __ilog2(unsigned long x)
{
        int lz;

        if (sizeof(x) == 4) {
                __asm__ (
                "       .set    push                                    \n"
                "       .set    mips32                                  \n"
                "       clz     %0, %1                                  \n"
                "       .set    pop                                     \n"
                : "=r" (lz)
                : "r" (x));

                return 31 - lz;
        }

        BUG_ON(sizeof(x) != 8);

        __asm__ (
        "       .set    push                                            \n"
        "       .set    mips64                                          \n"
        "       dclz    %0, %1                                          \n"
        "       .set    pop                                             \n"
        : "=r" (lz)
        : "r" (x));

        return 63 - lz;
}

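/*
 * Worked example (illustrative, not part of the original file): for a
 * 32-bit x = 0x00080000 the clz instruction counts 12 leading zero bits,
 * so __ilog2() returns 31 - 12 = 19, the index of the most significant
 * set bit.  For x = 0, clz yields 32 and the result is 31 - 32 = -1,
 * matching the comment above.
 */
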
#if defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64)

/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns 0..SZLONG-1
 * Undefined if no bit exists, so code should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
        return __ilog2(word & -word);
}

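/*
 * Hedged note (illustrative, not part of the original file): word & -word
 * isolates the lowest set bit, because two's-complement negation flips all
 * bits above the lowest 1 while keeping that 1.  For example, with
 * word = 44 (0b101100), word & -word = 4 (0b100), so
 * __ffs(44) = __ilog2(4) = 2.
 */
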
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int word)
{
        __asm__ ("clz %0, %1" : "=r" (word) : "r" (word));

        return 32 - word;
}

#if defined(CONFIG_64BIT) && defined(CONFIG_CPU_MIPS64)
static inline int fls64(__u64 word)
{
        __asm__ ("dclz %0, %1" : "=r" (word) : "r" (word));

        return 64 - word;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif

/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * This is defined the same way as
 * the libc and compiler builtin ffs routines, therefore
 * differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
        if (!word)
                return 0;

        return fls(word & -word);
}

#else

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffs.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/fls64.h>

#endif /* defined(CONFIG_CPU_MIPS32) || defined(CONFIG_CPU_MIPS64) */

#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/find.h>

#ifdef __KERNEL__

#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/ext2-non-atomic.h>
#include <asm-generic/bitops/ext2-atomic.h>
#include <asm-generic/bitops/minix.h>

#endif /* __KERNEL__ */

#endif /* _ASM_BITOPS_H */