*/
#include <stdio.h>
-#include "internal/cryptlib.h"
#include <openssl/asn1t.h>
+#include "crypto/cryptlib.h"
#define COPY_SIZE(a, b) (sizeof(a) < sizeof(b) ? sizeof(a) : sizeof(b))
memcpy(pval, &it->size, COPY_SIZE(*pval, it->size));
}
-/*
- * Originally BN_num_bits_word was called to perform this operation, but
- * trouble is that there is no guarantee that sizeof(long) equals to
- * sizeof(BN_ULONG). BN_ULONG is a configurable type that can be as wide
- * as long, but also double or half...
- */
-static int num_bits_ulong(unsigned long value)
-{
- size_t i;
- unsigned long ret = 0;
-
- /*
- * It is argued that *on average* constant counter loop performs
- * not worse [if not better] than one with conditional break or
- * mask-n-table-lookup-style, because of branch misprediction
- * penalties.
- */
- for (i = 0; i < sizeof(value) * 8; i++) {
- ret += (value != 0);
- value >>= 1;
- }
-
- return (int)ret;
-}
-
static int long_i2c(const ASN1_VALUE **pval, unsigned char *cont, int *putype,
                    const ASN1_ITEM *it)
{
sign = 0;
utmp = ltmp;
}
- clen = num_bits_ulong(utmp);
+ clen = (int)ossl_num_bits(utmp);
/* If MSB of leading octet set we need to pad */
if (!(clen & 0x7))
pad = 1;
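For illustration only (standalone sketch, not part of the patch; num_bits below is a hypothetical stand-in for ossl_num_bits, and the octet-count computation is a plausible positive-case reconstruction of the surrounding, unshown code): when the bit count is a multiple of 8 the most significant bit of the leading content octet is set, so a zero octet is prepended to keep the DER INTEGER non-negative.

#include <stdio.h>

/* hypothetical stand-in for ossl_num_bits(): bits needed to represent value */
static size_t num_bits(unsigned long value)
{
    size_t ret = 0;

    while (value != 0) {
        ret++;
        value >>= 1;
    }
    return ret;
}

int main(void)
{
    unsigned long utmp = 0x80;            /* positive value whose top octet has its MSB set */
    size_t clen = num_bits(utmp);         /* 8 */
    int pad = (clen & 0x7) == 0;          /* multiple of 8 -> leading octet has MSB set */
    size_t octets = (clen + 7) / 8 + pad; /* content octets, including the 0x00 pad */

    printf("bits=%zu pad=%d octets=%zu\n", clen, pad, octets); /* bits=8 pad=1 octets=2 */
    return 0;
}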
threads_pthread.c threads_win.c threads_none.c threads_common.c \
initthread.c context.c sparse_array.c asn1_dsa.c packet.c \
param_build.c param_build_set.c der_writer.c threads_lib.c \
- params_dup.c time.c array_alloc.c aligned_alloc.c deterministic_nonce.c
+ params_dup.c time.c array_alloc.c aligned_alloc.c deterministic_nonce.c \
+ int.c
SOURCE[../libcrypto]=$UTIL_COMMON \
mem.c mem_sec.c \
--- /dev/null
+/*
+ * Copyright 2025 The OpenSSL Project Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License 2.0 (the "License"). You may not use
+ * this file except in compliance with the License. You can obtain a copy
+ * in the file LICENSE in the source distribution or at
+ * https://www.openssl.org/source/license.html
+ */
+
+#include "crypto/cryptlib.h"
+
+size_t ossl_num_bits(size_t value)
+{
+    size_t i;
+    size_t ret = 0;
+
+    /*
+     * It is argued that, *on average*, a constant-count loop performs no
+     * worse (if not better) than one with a conditional break or a
+     * mask-and-table-lookup approach, because of branch misprediction
+     * penalties.
+     */
+    for (i = 0; i < sizeof(value) * 8; i++) {
+        ret += (value != 0);
+        value >>= 1;
+    }
+
+    return ret;
+}
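As a rough usage sketch (not part of the patch, and only buildable inside the OpenSSL tree since the helper is internal): ossl_num_bits() returns the number of bits needed to represent the value, i.e. the index of the highest set bit plus one, with 0 mapping to 0.

#include <assert.h>
#include <stddef.h>

size_t ossl_num_bits(size_t value);   /* as declared in "crypto/cryptlib.h" */

int main(void)
{
    assert(ossl_num_bits(0) == 0);
    assert(ossl_num_bits(1) == 1);
    assert(ossl_num_bits(0x80) == 8);  /* 1000 0000 -> 8 bits */
    assert(ossl_num_bits(0xFF) == 8);
    assert(ossl_num_bits(0x100) == 9);
    return 0;
}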
int ossl_crypto_alloc_ex_data_intern(int class_index, void *obj,
CRYPTO_EX_DATA *ad, int idx);
+size_t ossl_num_bits(size_t value);
+
#endif /* OSSL_CRYPTO_CRYPTLIB_H */