/* src/basic/alloc-util.c — memory allocation helpers */
db9ecf05 | 1 | /* SPDX-License-Identifier: LGPL-2.1-or-later */ |
b5efdb8a | 2 | |
e5905427 | 3 | #include <malloc.h> |
11c3a366 TA |
4 | #include <stdint.h> |
5 | #include <string.h> | |
6 | ||
b5efdb8a | 7 | #include "alloc-util.h" |
11c3a366 | 8 | #include "macro.h" |
0a970718 | 9 | #include "memory-util.h" |
b5efdb8a LP |
10 | |
/* Duplicate the first l bytes of p into a freshly allocated buffer.
 * For l == 0 a minimal 1-byte allocation is still made, so success is
 * always signalled by a non-NULL return. Returns NULL on OOM. */
void* memdup(const void *p, size_t l) {
        assert(l == 0 || p);

        void *copy = malloc(l ?: 1);
        if (!copy)
                return NULL;

        return memcpy_safe(copy, p, l);
}
22 | ||
/* Like memdup(), but allocates one extra byte and sets it to NUL, so the
 * copy may safely be treated as a NUL-terminated string even if the input
 * was not. Returns NULL on OOM, or if l == SIZE_MAX (l+1 would overflow). */
void* memdup_suffix0(const void *p, size_t l) {
        assert(l == 0 || p);

        if (_unlikely_(l == SIZE_MAX)) /* prevent overflow of l + 1 */
                return NULL;

        uint8_t *copy = malloc(l + 1);
        if (!copy)
                return NULL;

        copy[l] = 0; /* the safety NUL byte after the payload */
        return memcpy_safe(copy, p, l);
}
40 | ||
319a4f4b LP |
/* Ensure *p can hold at least 'need' elements of 'size' bytes, growing it
 * greedily (with headroom) via realloc() when it cannot. On success returns
 * the (possibly moved) buffer, which is also stored back into *p; on OOM or
 * arithmetic overflow returns NULL and leaves *p untouched. */
void* greedy_realloc(
                void **p,
                size_t need,
                size_t size) {

        assert(p);

        /* We use malloc_usable_size() (via MALLOC_SIZEOF_SAFE) for determining the current
         * allocated size. On all systems we care about this should be safe to rely on. Should
         * there ever arise the need to avoid relying on this we can instead locally fall back
         * to realloc() on every call, rounded up to the next exponent of 2 or so. */

        /* Fast path: the existing allocation already fits 'need' elements. */
        if (*p && (size == 0 || MALLOC_SIZEOF_SAFE(*p) / size >= need))
                return *p;

        /* Grow greedily: request twice the needed element count... */
        if (_unlikely_(need > SIZE_MAX/2)) /* Overflow check */
                return NULL;
        size_t n_bytes = need * 2;

        /* ...scaled to bytes, with multiplication overflow detection. */
        if (!MUL_ASSIGN_SAFE(&n_bytes, size))
                return NULL;

        /* Allocate at least 64 bytes */
        if (n_bytes < 64)
                n_bytes = 64;

        void *grown = realloc(*p, n_bytes);
        if (!grown)
                return NULL;

        return *p = grown;
}
75 | ||
319a4f4b LP |
/* Like greedy_realloc(), but any bytes the (re)allocation added beyond the
 * previously usable region are zero-initialized. Returns the buffer on
 * success, NULL on failure (in which case *p is left untouched). */
void* greedy_realloc0(
                void **p,
                size_t need,
                size_t size) {

        assert(p);

        /* Usable size of the old allocation; malloc_usable_size() will return 0 on
         * NULL input, as per docs. */
        size_t old_usable = MALLOC_SIZEOF_SAFE(*p);

        uint8_t *buf = greedy_realloc(p, need, size);
        if (!buf)
                return NULL;

        size_t new_usable = MALLOC_SIZEOF_SAFE(buf);

        /* Round the old usable size down to a whole multiple of the element size
         * (guarding against division by zero when size == 0), mirroring how much
         * of the old region held complete elements. */
        size_t initialized = size == 0 ? 0 : old_usable / size * size;

        /* Zero everything past that point in the new allocation. */
        if (new_usable > initialized)
                memzero(buf + initialized, new_usable - initialized);

        return buf;
}
7929e180 | 104 | |
3f27ba99 DS |
/* Append 'n_from' elements of 'size' bytes from 'from' to the array *p that
 * currently holds *n_p elements, growing the allocation via greedy_realloc()
 * as necessary. On success updates *n_p and returns the buffer; on overflow
 * or OOM returns NULL and leaves *p and *n_p untouched. */
void* greedy_realloc_append(
                void **p,
                size_t *n_p,
                const void *from,
                size_t n_from,
                size_t size) {

        assert(p);
        assert(n_p);
        assert(from || n_from == 0);

        /* Refuse element counts whose sum would overflow size_t. */
        if (n_from > SIZE_MAX - *n_p)
                return NULL;

        uint8_t *buf = greedy_realloc(p, *n_p + n_from, size);
        if (!buf)
                return NULL;

        /* Copy the new elements in right after the existing ones. */
        memcpy_safe(buf + *n_p * size, from, n_from * size);

        *n_p += n_from;

        return buf;
}
131 | ||
7929e180 SP |
132 | void *expand_to_usable(void *ptr, size_t newsize _unused_) { |
133 | return ptr; | |
134 | } |