_ha_aligned_alloc(__a, __s); \
})
+#undef ha_aligned_zalloc
+#define ha_aligned_zalloc(a,s) ({ \
+ size_t __a = (a); \
+ size_t __s = (s); \
+ static struct mem_stats _ __attribute__((used,__section__("mem_stats"),__aligned__(sizeof(void*)))) = { \
+ .caller = { \
+ .file = __FILE__, .line = __LINE__, \
+ .what = MEM_STATS_TYPE_CALLOC, \
+ .func = __func__, \
+ }, \
+ }; \
+ HA_WEAK(__start_mem_stats); \
+ HA_WEAK(__stop_mem_stats); \
+ _HA_ATOMIC_INC(&_.calls); \
+ _HA_ATOMIC_ADD(&_.size, __s); \
+ _ha_aligned_zalloc(__a, __s); \
+})
+
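+/* A minimal usage sketch (the structure name is hypothetical): allocate
+ * a zeroed, 64-byte aligned object and release it with ha_aligned_free():
+ *
+ *   struct entry *e = ha_aligned_zalloc(64, sizeof(*e));
+ *
+ *   if (e) {
+ *           ...
+ *           ha_aligned_free(e);
+ *   }
+ */
+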
#undef ha_aligned_alloc_safe
#define ha_aligned_alloc_safe(a,s) ({ \
size_t __a = (a); \
_ha_aligned_alloc_safe(__a, __s); \
})
+#undef ha_aligned_zalloc_safe
+#define ha_aligned_zalloc_safe(a,s) ({ \
+ size_t __a = (a); \
+ size_t __s = (s); \
+ static struct mem_stats _ __attribute__((used,__section__("mem_stats"),__aligned__(sizeof(void*)))) = { \
+ .caller = { \
+ .file = __FILE__, .line = __LINE__, \
+ .what = MEM_STATS_TYPE_CALLOC, \
+ .func = __func__, \
+ }, \
+ }; \
+ HA_WEAK(__start_mem_stats); \
+ HA_WEAK(__stop_mem_stats); \
+ _HA_ATOMIC_INC(&_.calls); \
+ _HA_ATOMIC_ADD(&_.size, __s); \
+ _ha_aligned_zalloc_safe(__a, __s); \
+})
+
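+/* ha_aligned_zalloc_safe() is used the same way, e.g. (hypothetical
+ * structure):
+ *
+ *   struct counters *c = ha_aligned_zalloc_safe(64, sizeof(*c));
+ *
+ * but alignment is only best-effort there: the _safe variant may fall
+ * back to plain malloc() when aligned allocations are not supported, so
+ * it is only suitable when alignment is a performance bonus rather than
+ * a requirement (see _ha_aligned_alloc_safe()).
+ */
+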
#undef ha_aligned_free
#define ha_aligned_free(x) ({ \
typeof(x) __x = (x); \
#define will_free(x, y) do { } while (0)
#define ha_aligned_alloc(a,s) _ha_aligned_alloc(a, s)
+#define ha_aligned_zalloc(a,s) _ha_aligned_zalloc(a, s)
#define ha_aligned_alloc_safe(a,s) _ha_aligned_alloc_safe(a, s)
+#define ha_aligned_zalloc_safe(a,s) _ha_aligned_zalloc_safe(a, s)
#define ha_aligned_free(p) _ha_aligned_free(p)
#define ha_aligned_free_size(p,s) _ha_aligned_free(p)
#endif
}
+
+/* Like above but zeroing the area */
+static inline void *_ha_aligned_zalloc(size_t alignment, size_t size)
+{
+ void *ret = _ha_aligned_alloc(alignment, size);
+
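+ /* note: there is no standard zeroing variant of the aligned allocators
+  * (aligned_alloc(), posix_memalign() etc), hence the explicit memset().
+  */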
+ if (ret)
+ memset(ret, 0, size);
+ return ret;
+}
+
/* portable memalign(): tries to accommodate OS specificities, and may fall
* back to plain malloc() if not supported, meaning that alignment guarantees
* are only a performance bonus but not granted. The size will automatically be
return _ha_aligned_alloc(alignment, size);
}
+
+/* Like above but zeroing the area */
+static inline void *_ha_aligned_zalloc_safe(size_t alignment, size_t size)
+{
+ void *ret = _ha_aligned_alloc_safe(alignment, size);
+
+ if (ret)
+ memset(ret, 0, size);
+ return ret;
+}
+
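+/* Note: callers are normally expected to go through the matching
+ * ha_aligned_zalloc() / ha_aligned_zalloc_safe() macros rather than
+ * calling these helpers directly, so that allocation accounting is
+ * performed.
+ */
+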
-/* To be used to free a pointer returned by _ha_aligned_alloc() or
- * _ha_aligned_alloc_safe(). Please use ha_aligned_free() instead
+/* To be used to free a pointer returned by _ha_aligned_alloc(),
+ * _ha_aligned_alloc_safe(), _ha_aligned_zalloc() or
+ * _ha_aligned_zalloc_safe(). Please use ha_aligned_free() instead
* (which does perform accounting).