Importing "grsecurity-3.1-3.19.2-201503201903.patch"

diff --git a/arch/mips/include/asm/atomic.h b/arch/mips/include/asm/atomic.h
index 857da84cfc92eb20bd7f29cb5d9b3c1e16b86203..3f4458b3e58ec93a58cb02915dc741818d03381b 100644
--- a/arch/mips/include/asm/atomic.h
+++ b/arch/mips/include/asm/atomic.h
 #include <asm/cmpxchg.h>
 #include <asm/war.h>
 
+#ifdef CONFIG_GENERIC_ATOMIC64
+#include <asm-generic/atomic64.h>
+#endif
+
 #define ATOMIC_INIT(i)   { (i) }
 
+#ifdef CONFIG_64BIT
+#define _ASM_EXTABLE(from, to)         \
+"      .section __ex_table,\"a\"\n"    \
+"      .dword  " #from ", " #to"\n"    \
+"      .previous\n"
+#else
+#define _ASM_EXTABLE(from, to)         \
+"      .section __ex_table,\"a\"\n"    \
+"      .word   " #from ", " #to"\n"    \
+"      .previous\n"
+#endif
+
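For orientation, _ASM_EXTABLE(from, to) simply pastes an __ex_table entry into the surrounding asm string; on a 64-bit build an invocation such as _ASM_EXTABLE(2b, 3b) expands, after stringification and literal concatenation, to:

        "       .section __ex_table,\"a\"\n"
        "       .dword  2b, 3b\n"
        "       .previous\n"

The first operand names the instruction that may fault (the trapping add at local label 2 in the macros below), the second the fixup address at which the exception handler resumes.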
 /*
  * atomic_read - read atomic variable
  * @v: pointer of type atomic_t
  *
  * Atomically reads the value of @v.
  */
-#define atomic_read(v)         ACCESS_ONCE((v)->counter)
+static inline int atomic_read(const atomic_t *v)
+{
+       return ACCESS_ONCE(v->counter);
+}
+
+static inline int atomic_read_unchecked(const atomic_unchecked_t *v)
+{
+       return ACCESS_ONCE(v->counter);
+}
 
 /*
  * atomic_set - set atomic variable
  *
  * Atomically sets the value of @v to @i.
  */
-#define atomic_set(v, i)               ((v)->counter = (i))
+static inline void atomic_set(atomic_t *v, int i)
+{
+       v->counter = i;
+}
 
-#define ATOMIC_OP(op, c_op, asm_op)                                          \
-static __inline__ void atomic_##op(int i, atomic_t * v)                              \
+static inline void atomic_set_unchecked(atomic_unchecked_t *v, int i)
+{
+       v->counter = i;
+}
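A minimal usage sketch of the two counter flavours introduced here (the struct and field names are hypothetical; only the atomic_* calls come from this header): atomic_t is for counts where overflow indicates a bug, atomic_unchecked_t for counters that may legitimately wrap.

        struct obj_sketch {
                atomic_t refcnt;                /* checked: overflow traps under PAX_REFCOUNT */
                atomic_unchecked_t stats;       /* unchecked: wrapping is tolerated */
        };

        static inline void obj_sketch_init(struct obj_sketch *o)
        {
                atomic_set(&o->refcnt, 1);
                atomic_set_unchecked(&o->stats, 0);
        }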
+
+#ifdef CONFIG_PAX_REFCOUNT
+#define __OVERFLOW_POST                                \
+       "       b       4f              \n"     \
+       "       .set    noreorder       \n"     \
+       "3:     b       5f              \n"     \
+       "       move    %0, %1          \n"     \
+       "       .set    reorder         \n"
+#define __OVERFLOW_EXTABLE     \
+       "3:\n"                  \
+       _ASM_EXTABLE(2b, 3b)
+#else
+#define __OVERFLOW_POST
+#define __OVERFLOW_EXTABLE
+#endif
+
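With CONFIG_PAX_REFCOUNT, the checked operations built below behave roughly like this hedged C-level sketch; the real code is the LL/SC assembly that follows, with the MIPS integer-overflow trap and the __ex_table fixup standing in for the explicit check:

        /* Illustrative sketch only, not the actual implementation. */
        static inline void atomic_add_checked_sketch(int i, atomic_t *v)
        {
                int old = v->counter;
                int new;

                if (__builtin_add_overflow(old, i, &new))
                        return;         /* the trapping 'add' faults; the fixup skips
                                         * the store, leaving the counter unchanged */
                v->counter = new;       /* in reality an ll/sc retry loop */
        }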
+#define __ATOMIC_OP(op, suffix, asm_op, extable)                             \
+static inline void atomic_##op##suffix(int i, atomic##suffix##_t * v)        \
 {                                                                            \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
-               "       .set    arch=r4000                              \n"   \
-               "1:     ll      %0, %1          # atomic_" #op "        \n"   \
-               "       " #asm_op " %0, %2                              \n"   \
+               "       .set    mips3                                   \n"   \
+               "1:     ll      %0, %1          # atomic_" #op #suffix "\n"   \
+               "2:     " #asm_op " %0, %2                              \n"   \
                "       sc      %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
+               extable                                                       \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
-               do {                                                          \
-                       __asm__ __volatile__(                                 \
-                       "       .set    arch=r4000                      \n"   \
-                       "       ll      %0, %1          # atomic_" #op "\n"   \
-                       "       " #asm_op " %0, %2                      \n"   \
-                       "       sc      %0, %1                          \n"   \
-                       "       .set    mips0                           \n"   \
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
-                       : "Ir" (i));                                          \
-               } while (unlikely(!temp));                                    \
+               __asm__ __volatile__(                                         \
+               "       .set    mips3                                   \n"   \
+               "1:     ll      %0, %1          # atomic_" #op #suffix "\n"   \
+               "2:     " #asm_op " %0, %2                              \n"   \
+               "       sc      %0, %1                                  \n"   \
+               "       beqz    %0, 1b                                  \n"   \
+                       extable                                               \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
+               : "Ir" (i));                                                  \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
-               v->counter c_op i;                                            \
+               __asm__ __volatile__(                                         \
+               "2:     " #asm_op " %0, %1                              \n"   \
+               extable                                                       \
+               : "+r" (v->counter) : "Ir" (i));                              \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
 }
 
-#define ATOMIC_OP_RETURN(op, c_op, asm_op)                                   \
-static __inline__ int atomic_##op##_return(int i, atomic_t * v)                      \
+#define ATOMIC_OP(op, asm_op) __ATOMIC_OP(op, , asm_op, __OVERFLOW_EXTABLE)  \
+                             __ATOMIC_OP(op, _unchecked, asm_op##u, )
+
+#define __ATOMIC_OP_RETURN(op, suffix, asm_op, post_op, extable)             \
+static inline int atomic_##op##_return##suffix(int i, atomic##suffix##_t * v) \
 {                                                                            \
        int result;                                                           \
                                                                              \
@@ -89,12 +143,15 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                  \
                int temp;                                                     \
                                                                              \
                __asm__ __volatile__(                                         \
-               "       .set    arch=r4000                              \n"   \
-               "1:     ll      %1, %2          # atomic_" #op "_return \n"   \
-               "       " #asm_op " %0, %1, %3                          \n"   \
+               "       .set    mips3                                   \n"   \
+               "1:     ll      %1, %2  # atomic_" #op "_return" #suffix"\n"  \
+               "2:     " #asm_op " %0, %1, %3                          \n"   \
                "       sc      %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
-               "       " #asm_op " %0, %1, %3                          \n"   \
+               post_op                                                       \
+               extable                                                       \
+               "4:     " #asm_op " %0, %1, %3                          \n"   \
+               "5:                                                     \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF12_ASM() (v->counter)                            \
@@ -102,26 +159,33 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                 \
        } else if (kernel_uses_llsc) {                                        \
                int temp;                                                     \
                                                                              \
-               do {                                                          \
-                       __asm__ __volatile__(                                 \
-                       "       .set    arch=r4000                      \n"   \
-                       "       ll      %1, %2  # atomic_" #op "_return \n"   \
-                       "       " #asm_op " %0, %1, %3                  \n"   \
-                       "       sc      %0, %2                          \n"   \
-                       "       .set    mips0                           \n"   \
-                       : "=&r" (result), "=&r" (temp),                       \
-                         "+" GCC_OFF12_ASM() (v->counter)                    \
-                       : "Ir" (i));                                          \
-               } while (unlikely(!result));                                  \
+               __asm__ __volatile__(                                         \
+               "       .set    mips3                                   \n"   \
+               "1:     ll      %1, %2  # atomic_" #op "_return" #suffix "\n" \
+               "2:     " #asm_op " %0, %1, %3                          \n"   \
+               "       sc      %0, %2                                  \n"   \
+               post_op                                                       \
+               extable                                                       \
+               "4:     " #asm_op " %0, %1, %3                          \n"   \
+               "5:                                                     \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (result), "=&r" (temp),                               \
+                 "+" GCC_OFF12_ASM() (v->counter)                            \
+               : "Ir" (i));                                                  \
                                                                              \
-               result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
-               result = v->counter;                                          \
-               result c_op i;                                                \
-               v->counter = result;                                          \
+               __asm__ __volatile__(                                         \
+               "       lw      %0, %1                                  \n"   \
+               "2:     " #asm_op " %0, %1, %2                          \n"   \
+               "       sw      %0, %1                                  \n"   \
+               "3:                                                     \n"   \
+               extable                                                       \
+               : "=&r" (result), "+" GCC_OFF12_ASM() (v->counter)            \
+               : "Ir" (i));                                                  \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
@@ -130,16 +194,21 @@ static __inline__ int atomic_##op##_return(int i, atomic_t * v)                 \
        return result;                                                        \
 }
 
-#define ATOMIC_OPS(op, c_op, asm_op)                                         \
-       ATOMIC_OP(op, c_op, asm_op)                                           \
-       ATOMIC_OP_RETURN(op, c_op, asm_op)
+#define ATOMIC_OP_RETURN(op, asm_op) __ATOMIC_OP_RETURN(op, , asm_op, __OVERFLOW_POST, __OVERFLOW_EXTABLE) \
+                                    __ATOMIC_OP_RETURN(op, _unchecked, asm_op##u, , )
+
+#define ATOMIC_OPS(op, asm_op)                                               \
+       ATOMIC_OP(op, asm_op)                                                 \
+       ATOMIC_OP_RETURN(op, asm_op)
 
-ATOMIC_OPS(add, +=, addu)
-ATOMIC_OPS(sub, -=, subu)
+ATOMIC_OPS(add, add)
+ATOMIC_OPS(sub, sub)
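Each ATOMIC_OPS() invocation above instantiates both the checked and the _unchecked variants; for ATOMIC_OPS(add, add) the generated set is, in summary (signatures reconstructed from the macros, not literal text from the file):

        void atomic_add(int i, atomic_t *v);                      /* trapping add */
        void atomic_add_unchecked(int i, atomic_unchecked_t *v);  /* addu */
        int atomic_add_return(int i, atomic_t *v);
        int atomic_add_return_unchecked(int i, atomic_unchecked_t *v);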
 
 #undef ATOMIC_OPS
 #undef ATOMIC_OP_RETURN
+#undef __ATOMIC_OP_RETURN
 #undef ATOMIC_OP
+#undef __ATOMIC_OP
 
 /*
  * atomic_sub_if_positive - conditionally subtract integer from atomic variable
@@ -149,7 +218,7 @@ ATOMIC_OPS(sub, -=, subu)
  * Atomically test @v and subtract @i if @v is greater or equal than @i.
  * The function returns the old value of @v minus @i.
  */
-static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
+static __inline__ int atomic_sub_if_positive(int i, atomic_t *v)
 {
        int result;
 
@@ -208,8 +277,26 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
        return result;
 }
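Behaviourally, the function above amounts to the following hedged sketch; the real code performs the whole read-check-store sequence atomically in one LL/SC loop:

        /* Illustrative only: subtract i unless the result would go negative. */
        static inline int sub_if_positive_sketch(int i, atomic_t *v)
        {
                int result = atomic_read(v) - i;

                if (result >= 0)
                        atomic_set(v, result);  /* really one atomic ll/sc update */
                return result;                  /* old value minus i, stored or not */
        }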
 
-#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
-#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
+static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
+{
+       return cmpxchg(&v->counter, old, new);
+}
+
+static inline int atomic_cmpxchg_unchecked(atomic_unchecked_t *v, int old,
+                                          int new)
+{
+       return cmpxchg(&(v->counter), old, new);
+}
+
+static inline int atomic_xchg(atomic_t *v, int new)
+{
+       return xchg(&v->counter, new);
+}
+
+static inline int atomic_xchg_unchecked(atomic_unchecked_t *v, int new)
+{
+       return xchg(&(v->counter), new);
+}
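As a usage note, atomic_cmpxchg() is the building block for conditional updates such as the __atomic_add_unless() documented below; a hedged sketch of the usual retry pattern:

        /* Illustrative cmpxchg loop, not the file's own implementation. */
        static inline int add_unless_sketch(atomic_t *v, int a, int u)
        {
                int c = atomic_read(v);

                while (c != u) {
                        int old = atomic_cmpxchg(v, c, c + a);

                        if (old == c)
                                break;          /* our update landed */
                        c = old;                /* lost a race: retry with the fresh value */
                }
                return c;                       /* old value; caller tests against u */
        }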
 
 /**
  * __atomic_add_unless - add unless the number is a given value
@@ -237,6 +324,10 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 
 #define atomic_dec_return(v) atomic_sub_return(1, (v))
 #define atomic_inc_return(v) atomic_add_return(1, (v))
+static __inline__ int atomic_inc_return_unchecked(atomic_unchecked_t *v)
+{
+       return atomic_add_return_unchecked(1, v);
+}
 
 /*
  * atomic_sub_and_test - subtract value from variable and test result
@@ -258,6 +349,10 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * other cases.
  */
 #define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
+static __inline__ int atomic_inc_and_test_unchecked(atomic_unchecked_t *v)
+{
+       return atomic_add_return_unchecked(1, v) == 0;
+}
 
 /*
  * atomic_dec_and_test - decrement by 1 and test
@@ -282,6 +377,10 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * Atomically increments @v by 1.
  */
 #define atomic_inc(v) atomic_add(1, (v))
+static __inline__ void atomic_inc_unchecked(atomic_unchecked_t *v)
+{
+       atomic_add_unchecked(1, v);
+}
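The checked increment is what reference-count users end up calling; a small hedged example of the usual get/put pairing (names hypothetical; atomic_dec_and_test() is declared further down in this file):

        static inline void ref_get_sketch(atomic_t *refs)
        {
                atomic_inc(refs);                  /* overflow traps under PAX_REFCOUNT */
        }

        static inline int ref_put_sketch(atomic_t *refs)
        {
                return atomic_dec_and_test(refs);  /* non-zero when the last reference drops */
        }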
 
 /*
  * atomic_dec - decrement and test
@@ -290,6 +389,10 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * Atomically decrements @v by 1.
  */
 #define atomic_dec(v) atomic_sub(1, (v))
+static __inline__ void atomic_dec_unchecked(atomic_unchecked_t *v)
+{
+       atomic_sub_unchecked(1, v);
+}
 
 /*
  * atomic_add_negative - add and test if negative
@@ -311,54 +414,77 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  * @v: pointer of type atomic64_t
  *
  */
-#define atomic64_read(v)       ACCESS_ONCE((v)->counter)
+static inline long atomic64_read(const atomic64_t *v)
+{
+       return ACCESS_ONCE(v->counter);
+}
+
+static inline long atomic64_read_unchecked(const atomic64_unchecked_t *v)
+{
+       return ACCESS_ONCE(v->counter);
+}
 
 /*
  * atomic64_set - set atomic variable
  * @v: pointer of type atomic64_t
  * @i: required value
  */
-#define atomic64_set(v, i)     ((v)->counter = (i))
+static inline void atomic64_set(atomic64_t *v, long i)
+{
+       v->counter = i;
+}
+
+static inline void atomic64_set_unchecked(atomic64_unchecked_t *v, long i)
+{
+       v->counter = i;
+}
 
-#define ATOMIC64_OP(op, c_op, asm_op)                                        \
-static __inline__ void atomic64_##op(long i, atomic64_t * v)                 \
+#define __ATOMIC64_OP(op, suffix, asm_op, extable)                           \
+static inline void atomic64_##op##suffix(long i, atomic64##suffix##_t * v)    \
 {                                                                            \
        if (kernel_uses_llsc && R10000_LLSC_WAR) {                            \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
-               "       .set    arch=r4000                              \n"   \
-               "1:     lld     %0, %1          # atomic64_" #op "      \n"   \
-               "       " #asm_op " %0, %2                              \n"   \
+               "       .set    mips3                                   \n"   \
+               "1:     lld     %0, %1          # atomic64_" #op #suffix "\n" \
+               "2:     " #asm_op " %0, %2                              \n"   \
                "       scd     %0, %1                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
+               extable                                                       \
                "       .set    mips0                                   \n"   \
                : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
                : "Ir" (i));                                                  \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
-               do {                                                          \
-                       __asm__ __volatile__(                                 \
-                       "       .set    arch=r4000                      \n"   \
-                       "       lld     %0, %1          # atomic64_" #op "\n" \
-                       "       " #asm_op " %0, %2                      \n"   \
-                       "       scd     %0, %1                          \n"   \
-                       "       .set    mips0                           \n"   \
-                       : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)      \
-                       : "Ir" (i));                                          \
-               } while (unlikely(!temp));                                    \
+               __asm__ __volatile__(                                         \
+               "       .set    mips3                                   \n"   \
+               "1:     lld     %0, %1          # atomic64_" #op #suffix "\n" \
+               "2:     " #asm_op " %0, %2                              \n"   \
+               "       scd     %0, %1                                  \n"   \
+               "       beqz    %0, 1b                                  \n"   \
+                       extable                                               \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (temp), "+" GCC_OFF12_ASM() (v->counter)              \
+               : "Ir" (i));                                                  \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
-               v->counter c_op i;                                            \
+               __asm__ __volatile__(                                         \
+               "2:     " #asm_op " %0, %1                              \n"   \
+               extable                                                       \
+               : "+" GCC_OFF12_ASM() (v->counter) : "Ir" (i));               \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
 }
 
-#define ATOMIC64_OP_RETURN(op, c_op, asm_op)                                 \
-static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)        \
+#define ATOMIC64_OP(op, asm_op) __ATOMIC64_OP(op, , asm_op, __OVERFLOW_EXTABLE) \
+                               __ATOMIC64_OP(op, _unchecked, asm_op##u, )
+
+#define __ATOMIC64_OP_RETURN(op, suffix, asm_op, post_op, extable)           \
+static inline long atomic64_##op##_return##suffix(long i, atomic64##suffix##_t * v)\
 {                                                                            \
        long result;                                                          \
                                                                              \
@@ -368,12 +494,15 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)           \
                long temp;                                                    \
                                                                              \
                __asm__ __volatile__(                                         \
-               "       .set    arch=r4000                              \n"   \
+               "       .set    mips3                                   \n"   \
                "1:     lld     %1, %2          # atomic64_" #op "_return\n"  \
-               "       " #asm_op " %0, %1, %3                          \n"   \
+               "2:     " #asm_op " %0, %1, %3                          \n"   \
                "       scd     %0, %2                                  \n"   \
                "       beqzl   %0, 1b                                  \n"   \
-               "       " #asm_op " %0, %1, %3                          \n"   \
+               post_op                                                       \
+               extable                                                       \
+               "4:     " #asm_op " %0, %1, %3                          \n"   \
+               "5:                                                     \n"   \
                "       .set    mips0                                   \n"   \
                : "=&r" (result), "=&r" (temp),                               \
                  "+" GCC_OFF12_ASM() (v->counter)                            \
@@ -381,27 +510,35 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)           \
        } else if (kernel_uses_llsc) {                                        \
                long temp;                                                    \
                                                                              \
-               do {                                                          \
-                       __asm__ __volatile__(                                 \
-                       "       .set    arch=r4000                      \n"   \
-                       "       lld     %1, %2  # atomic64_" #op "_return\n"  \
-                       "       " #asm_op " %0, %1, %3                  \n"   \
-                       "       scd     %0, %2                          \n"   \
-                       "       .set    mips0                           \n"   \
-                       : "=&r" (result), "=&r" (temp),                       \
-                         "=" GCC_OFF12_ASM() (v->counter)                    \
-                       : "Ir" (i), GCC_OFF12_ASM() (v->counter)              \
-                       : "memory");                                          \
-               } while (unlikely(!result));                                  \
+               __asm__ __volatile__(                                         \
+               "       .set    mips3                                   \n"   \
+               "1:     lld     %1, %2  # atomic64_" #op "_return" #suffix "\n"\
+               "2:     " #asm_op " %0, %1, %3                          \n"   \
+               "       scd     %0, %2                                  \n"   \
+               "       beqz    %0, 1b                                  \n"   \
+               post_op                                                       \
+               extable                                                       \
+               "4:     " #asm_op " %0, %1, %3                          \n"   \
+               "5:                                                     \n"   \
+               "       .set    mips0                                   \n"   \
+               : "=&r" (result), "=&r" (temp),                               \
+                 "=" GCC_OFF12_ASM() (v->counter)                            \
+               : "Ir" (i), GCC_OFF12_ASM() (v->counter)                      \
+               : "memory");                                                  \
                                                                              \
-               result = temp; result c_op i;                                 \
        } else {                                                              \
                unsigned long flags;                                          \
                                                                              \
                raw_local_irq_save(flags);                                    \
-               result = v->counter;                                          \
-               result c_op i;                                                \
-               v->counter = result;                                          \
+               __asm__ __volatile__(                                         \
+               "       ld      %0, %1                                  \n"   \
+               "2:     " #asm_op " %0, %1, %2                          \n"   \
+               "       sd      %0, %1                                  \n"   \
+               "3:                                                     \n"   \
+               extable                                                       \
+               : "=&r" (result), "+" GCC_OFF12_ASM() (v->counter)            \
+               : "Ir" (i));                                                  \
                raw_local_irq_restore(flags);                                 \
        }                                                                     \
                                                                              \
@@ -410,16 +547,23 @@ static __inline__ long atomic64_##op##_return(long i, atomic64_t * v)           \
        return result;                                                        \
 }
 
-#define ATOMIC64_OPS(op, c_op, asm_op)                                       \
-       ATOMIC64_OP(op, c_op, asm_op)                                         \
-       ATOMIC64_OP_RETURN(op, c_op, asm_op)
+#define ATOMIC64_OP_RETURN(op, asm_op) __ATOMIC64_OP_RETURN(op, , asm_op, __OVERFLOW_POST, __OVERFLOW_EXTABLE)     \
+                                      __ATOMIC64_OP_RETURN(op, _unchecked, asm_op##u, , )
 
-ATOMIC64_OPS(add, +=, daddu)
-ATOMIC64_OPS(sub, -=, dsubu)
+#define ATOMIC64_OPS(op, asm_op)                                               \
+       ATOMIC64_OP(op, asm_op)                                                 \
+       ATOMIC64_OP_RETURN(op, asm_op)
+
+ATOMIC64_OPS(add, dadd)
+ATOMIC64_OPS(sub, dsub)
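The 64-bit instantiations mirror the 32-bit ones: the trapping dadd backs the checked atomic64_* operations and daddu the _unchecked ones (likewise dsub/dsubu for sub); for example:

        long atomic64_add_return(long i, atomic64_t *v);                      /* dadd */
        long atomic64_add_return_unchecked(long i, atomic64_unchecked_t *v);  /* daddu */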
 
 #undef ATOMIC64_OPS
 #undef ATOMIC64_OP_RETURN
+#undef __ATOMIC64_OP_RETURN
 #undef ATOMIC64_OP
+#undef __ATOMIC64_OP
+#undef __OVERFLOW_EXTABLE
+#undef __OVERFLOW_POST
 
 /*
  * atomic64_sub_if_positive - conditionally subtract integer from atomic
@@ -430,7 +574,7 @@ ATOMIC64_OPS(sub, -=, dsubu)
  * Atomically test @v and subtract @i if @v is greater or equal than @i.
  * The function returns the old value of @v minus @i.
  */
-static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
+static __inline__ long atomic64_sub_if_positive(long i, atomic64_t *v)
 {
        long result;
 
@@ -489,9 +633,26 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
        return result;
 }
 
-#define atomic64_cmpxchg(v, o, n) \
-       ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
-#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
+static inline long atomic64_cmpxchg(atomic64_t *v, long old, long new)
+{
+       return cmpxchg(&v->counter, old, new);
+}
+
+static inline long atomic64_cmpxchg_unchecked(atomic64_unchecked_t *v, long old,
+                                             long new)
+{
+       return cmpxchg(&(v->counter), old, new);
+}
+
+static inline long atomic64_xchg(atomic64_t *v, long new)
+{
+       return xchg(&v->counter, new);
+}
+
+static inline long atomic64_xchg_unchecked(atomic64_unchecked_t *v, long new)
+{
+       return xchg(&(v->counter), new);
+}
 
 /**
  * atomic64_add_unless - add unless the number is a given value
@@ -521,6 +682,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 
 #define atomic64_dec_return(v) atomic64_sub_return(1, (v))
 #define atomic64_inc_return(v) atomic64_add_return(1, (v))
+#define atomic64_inc_return_unchecked(v) atomic64_add_return_unchecked(1, (v))
 
 /*
  * atomic64_sub_and_test - subtract value from variable and test result
@@ -542,6 +704,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * other cases.
  */
 #define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+#define atomic64_inc_and_test_unchecked(v) (atomic64_add_return_unchecked(1, (v)) == 0)
 
 /*
  * atomic64_dec_and_test - decrement by 1 and test
@@ -566,6 +729,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * Atomically increments @v by 1.
  */
 #define atomic64_inc(v) atomic64_add(1, (v))
+#define atomic64_inc_unchecked(v) atomic64_add_unchecked(1, (v))
 
 /*
  * atomic64_dec - decrement and test
@@ -574,6 +738,7 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * Atomically decrements @v by 1.
  */
 #define atomic64_dec(v) atomic64_sub(1, (v))
+#define atomic64_dec_unchecked(v) atomic64_sub_unchecked(1, (v))
 
 /*
  * atomic64_add_negative - add and test if negative