# define HAVE_ATOMICS 1
-typedef _Atomic int CRYPTO_REF_COUNT;
+typedef struct {
+ _Atomic int val;
+} CRYPTO_REF_COUNT;
-static inline int CRYPTO_UP_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_add_explicit(val, 1, memory_order_relaxed) + 1;
+ *ret = atomic_fetch_add_explicit(&refcnt->val, 1, memory_order_relaxed) + 1;
return 1;
}
* to mutable members doesn't have to be serialized anymore, which would
* otherwise imply an acquire fence. Hence conditional acquire fence...
*/
-static inline int CRYPTO_DOWN_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_fetch_sub_explicit(val, 1, memory_order_relaxed) - 1;
+ *ret = atomic_fetch_sub_explicit(&refcnt->val, 1, memory_order_relaxed) - 1;
if (*ret == 0)
atomic_thread_fence(memory_order_acquire);
return 1;
}
-static inline int CRYPTO_GET_REF(_Atomic int *val, int *ret,
- ossl_unused void *lock)
+static inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = atomic_load_explicit(val, memory_order_relaxed);
+ *ret = atomic_load_explicit(&refcnt->val, memory_order_relaxed);
return 1;
}
# define HAVE_ATOMICS 1
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+} CRYPTO_REF_COUNT;
-static __inline__ int CRYPTO_UP_REF(int *val, int *ret, ossl_unused void *lock)
+static __inline__ int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_add(val, 1, __ATOMIC_RELAXED) + 1;
+ *ret = __atomic_fetch_add(&refcnt->val, 1, __ATOMIC_RELAXED) + 1;
return 1;
}
-static __inline__ int CRYPTO_DOWN_REF(int *val, int *ret,
- ossl_unused void *lock)
+static __inline__ int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_fetch_sub(val, 1, __ATOMIC_RELAXED) - 1;
+ *ret = __atomic_fetch_sub(&refcnt->val, 1, __ATOMIC_RELAXED) - 1;
if (*ret == 0)
__atomic_thread_fence(__ATOMIC_ACQUIRE);
return 1;
}
-static __inline__ int CRYPTO_GET_REF(int *val, int *ret, ossl_unused void *lock)
+static __inline__ int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = __atomic_load_n(val, __ATOMIC_RELAXED);
+ *ret = __atomic_load_n(&refcnt->val, __ATOMIC_RELAXED);
return 1;
}
# elif defined(__ICL) && defined(_WIN32)
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
+
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, 1) + 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd((void *)val, -1) - 1;
+ *ret = _InterlockedExchangeAdd((void *)&refcnt->val, -1) - 1;
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedOr((void *)val, 0);
+ *ret = _InterlockedOr((void *)&refcnt->val, 0);
return 1;
}
# define HAVE_ATOMICS 1
-typedef volatile int CRYPTO_REF_COUNT;
+typedef struct {
+ volatile int val;
+} CRYPTO_REF_COUNT;
# if (defined(_M_ARM) && _M_ARM>=7 && !defined(_WIN32_WCE)) || defined(_M_ARM64)
# include <intrin.h>
# define _ARM_BARRIER_ISH _ARM64_BARRIER_ISH
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, 1) + 1;
+ *ret = _InterlockedExchangeAdd_nf(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd_nf(val, -1) - 1;
+ *ret = _InterlockedExchangeAdd_nf(&refcnt->val, -1) - 1;
if (*ret == 0)
__dmb(_ARM_BARRIER_ISH);
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedOr_nf((void *)val, 0);
+ *ret = _InterlockedOr_nf((void *)&refcnt->val, 0);
return 1;
}
# endif
# endif
-static __inline int CRYPTO_UP_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, 1) + 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 1) + 1;
return 1;
}
-static __inline int CRYPTO_DOWN_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, -1) - 1;
+ *ret = _InterlockedExchangeAdd(&refcnt->val, -1) - 1;
return 1;
}
-static __inline int CRYPTO_GET_REF(volatile int *val, int *ret,
- ossl_unused void *lock)
+static __inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt, int *ret)
{
- *ret = _InterlockedExchangeAdd(val, 0);
+ *ret = _InterlockedExchangeAdd(&refcnt->val, 0);
return 1;
}
*/
# ifndef HAVE_ATOMICS
-typedef int CRYPTO_REF_COUNT;
+typedef struct {
+ int val;
+# ifdef OPENSSL_THREADS
+ CRYPTO_RWLOCK *lock;
+# endif
+} CRYPTO_REF_COUNT;
+
+# ifdef OPENSSL_THREADS
+
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, 1, ret, refcnt->lock);
+}
+
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_add(&refcnt->val, -1, ret, refcnt->lock);
+}
+
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ return CRYPTO_atomic_load_int(&refcnt->val, ret, refcnt->lock);
+}
+
+# define CRYPTO_NEW_FREE_DEFINED 1
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ refcnt->lock = CRYPTO_THREAD_lock_new();
+ if (refcnt->lock == NULL) {
+ ERR_raise(ERR_LIB_CRYPTO, ERR_R_CRYPTO_LIB);
+ return 0;
+ }
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+ CRYPTO_THREAD_lock_free(refcnt->lock);
+}
+
+# else /* OPENSSL_THREADS */
+
+static ossl_unused ossl_inline int CRYPTO_UP_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val++;
+ *ret = refcnt->val;
+ return 1;
+}
-# define CRYPTO_UP_REF(val, ret, lock) CRYPTO_atomic_add(val, 1, ret, lock)
-# define CRYPTO_DOWN_REF(val, ret, lock) CRYPTO_atomic_add(val, -1, ret, lock)
-# define CRYPTO_GET_REF(val, ret, lock) CRYPTO_atomic_load_int(val, ret, lock)
+static ossl_unused ossl_inline int CRYPTO_DOWN_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ refcnt->val--;
+ *ret = refcnt->val;
+ return 1;
+}
+static ossl_unused ossl_inline int CRYPTO_GET_REF(CRYPTO_REF_COUNT *refcnt,
+ int *ret)
+{
+ *ret = refcnt->val;
+ return 1;
+}
+
+# endif /* OPENSSL_THREADS */
# endif
+# ifndef CRYPTO_NEW_FREE_DEFINED
+static ossl_unused ossl_inline int CRYPTO_NEW_REF(CRYPTO_REF_COUNT *refcnt, int n)
+{
+ refcnt->val = n;
+ return 1;
+}
+
+static ossl_unused ossl_inline void CRYPTO_FREE_REF(CRYPTO_REF_COUNT *refcnt)
+{
+}
+# endif /* CRYPTO_NEW_FREE_DEFINED */
+# undef CRYPTO_NEW_FREE_DEFINED
+
# if !defined(NDEBUG) && !defined(OPENSSL_NO_STDIO)
# define REF_ASSERT_ISNT(test) \
(void)((test) ? (OPENSSL_die("refcount error", __FILE__, __LINE__), 1) : 0)
# define REF_PRINT_EX(text, count, object) \
OSSL_TRACE3(REF_COUNT, "%p:%4d:%s\n", (object), (count), (text));
# define REF_PRINT_COUNT(text, object) \
- REF_PRINT_EX(text, object->references, (void *)object)
+ REF_PRINT_EX(text, object->references.val, (void *)object)
#endif