33 #ifndef AKMALLOC_MALLOC_C
34 #define AKMALLOC_MALLOC_C
85 #define AKMALLOC_MAJOR_VER 1
86 #define AKMALLOC_MINOR_VER 0
87 #define AKMALLOC_PATCH_VER 1
89 #if !defined(AKMALLOC_INCLUDE_ONLY) && !defined(AKMALLOC_LINK_STATIC) && !defined(AKMALLOC_BUILD)
90 # error "You must set one of the defined AKMALLOC_INCLUDE_ONLY (header only), AKMALLOC_LINK_STATIC (static libs), or AKMALLOC_BUILD (shared libs)"
111 #if defined(_MSC_VER)
112 # define AKMALLOC_MSVC 1
115 #if defined(__GNUC__) && !defined(__clang__)
116 # define AKMALLOC_GCC 1
119 #if defined(__clang__)
120 # define AKMALLOC_CLANG 1
123 #if !AKMALLOC_MSVC && !AKMALLOC_GCC && !AKMALLOC_CLANG
124 # error "Unsupported compiler!"
131 # define AKMALLOC_LINUX 0
132 # define AKMALLOC_WINDOWS 1
133 # define AKMALLOC_IOS 0
134 # define AKMALLOC_MACOS 0
137 #if defined(__linux__)
138 # define AKMALLOC_LINUX 1
139 # define AKMALLOC_WINDOWS 0
140 # define AKMALLOC_IOS 0
141 # define AKMALLOC_MACOS 0
144 #if defined(__APPLE__)
145 #include <TargetConditionals.h>
146 # if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
147 # define AKMALLOC_LINUX 0
148 # define AKMALLOC_WINDOWS 0
149 # define AKMALLOC_IOS 1
150 # define AKMALLOC_MACOS 0
152 # define AKMALLOC_LINUX 0
153 # define AKMALLOC_WINDOWS 0
154 # define AKMALLOC_IOS 0
155 # define AKMALLOC_MACOS 1
157 # error "Unknown Apple platform"
161 #if !AKMALLOC_LINUX && !AKMALLOC_WINDOWS && !AKMALLOC_IOS && !AKMALLOC_MACOS
162 # error "Invalid or unsupported platform!"
171 # define AKMALLOC_BITNESS 64
173 # define AKMALLOC_BITNESS 32
177 #if AKMALLOC_GCC || AKMALLOC_CLANG
178 # if defined(__LP64__) || defined(__x86_64__) || defined(__ppc64__) || defined(__aarch64__) || defined(__ARM_ARCH_ISA_A64)
179 # define AKMALLOC_BITNESS 64
181 # define AKMALLOC_BITNESS 32
185 #if !defined(AKMALLOC_BITNESS) || (AKMALLOC_BITNESS != 32 && AKMALLOC_BITNESS != 64)
186 # error "Invalid bitness or bitness undefined."
193 #if defined(__arm__) || defined(_M_ARM)
194 # define AKMALLOC_ARM 1
196 # define AKMALLOC_ARM 0
201 #if !defined(AK_EXTERN_C_BEGIN)
202 # if defined(__cplusplus)
203 # define AK_EXTERN_C_BEGIN extern "C" {
204 # define AK_EXTERN_C_END }
206 # define AK_EXTERN_C_BEGIN
207 # define AK_EXTERN_C_END
211 #if !defined(AKMALLOC_INCLUDE_ONLY)
215 #if !AKMALLOC_WINDOWS
222 typedef unsigned int ak_u32;
225 typedef __int64 ak_i64;
226 typedef unsigned __int64 ak_u64;
228 typedef long long int ak_i64;
229 typedef unsigned long long int ak_u64;
232 #if AKMALLOC_BITNESS == 32
233 typedef ak_u32 ak_sz;
234 typedef ak_i32 ak_ssz;
236 typedef ak_u64 ak_sz;
237 typedef ak_i64 ak_ssz;
241 # define ak_inline __forceinline
243 # define ak_inline __inline__ __attribute__((always_inline))
252 # define ak_likely(x) x
253 # define ak_unlikely(x) x
255 # define ak_likely(x) __builtin_expect(!!(x), 1)
256 # define ak_unlikely(x) __builtin_expect(!!(x), 0)
259 #define AK_SZ_ONE ((ak_sz)1)
261 #define AK_SZ_MAX (~((ak_sz)0))
262 #define AK_U32_MAX (~((ak_u32)0))
266 #define AKMALLOC_DEFAULT_PAGE_SIZE 4096
267 #define AKMALLOC_DEFAULT_LARGE_BLOCK_SIZE (AK_SZ_ONE << 21)
273 # define AKMALLOC_CACHE_LINE_LENGTH 32
275 # define AKMALLOC_CACHE_LINE_LENGTH 64
278 #define ak_as_ptr(x) (&(x))
280 #define ak_ptr_cast(ty, expr) ((ty*)((void*)(expr)))
284 #if !defined(AKMALLOC_ASSERT_IMPL)
287 # define AKMALLOC_ASSERT_IMPL(x) \
289 fprintf(stderr, "%s (%d) : %s\n", __FILE__, __LINE__, "ASSERT: failed condition `" #x "'"); \
293 static void ak_call_abort()
299 #if !defined(AKMALLOC_ASSERT)
300 # if !defined(NDEBUG)
301 # define AKMALLOC_ASSERT AKMALLOC_ASSERT_IMPL
303 # define AKMALLOC_ASSERT(...) do { } while(0)
307 #if !defined(AKMALLOC_ASSERT_ALWAYS)
308 # define AKMALLOC_ASSERT_ALWAYS AKMALLOC_ASSERT_IMPL
311 #if !defined(AKMALLOC_DEBUG_PRINT)
315 #if defined(AKMALLOC_DEBUG_PRINT)
316 # define DBG_PRINTF(...) fprintf(stdout, __VA_ARGS__); fflush(stdout)
318 # define DBG_PRINTF(...) (void)0
323 typedef struct ak_spinlock_tag ak_spinlock;
325 struct ak_spinlock_tag
330 static void ak_os_sleep(ak_u32 micros);
332 static void ak_spinlock_yield();
334 #if !AKMALLOC_MSVC && (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) > 40100
335 # define ak_atomic_cas(px, nx, ox) __sync_bool_compare_and_swap((px), (ox), (nx))
336 # define ak_atomic_xchg(px, nx) __sync_lock_test_and_set((px), (nx))
341 long __cdecl _InterlockedCompareExchange(
long volatile* Target,
long NewValue,
long OldValue);
342 long __cdecl _InterlockedExchange(
long volatile* Target,
long NewValue);
344 # pragma intrinsic (_InterlockedCompareExchange)
346 # define ak_atomic_cas(px, nx, ox) (_InterlockedCompareExchange((volatile long*)(px), (nx), (ox)) == (ox))
347 # define ak_atomic_xchg(px, nx) _InterlockedExchange((volatile long*)(px), (nx))
/*
 * Returns non-zero if the spinlock is currently held.
 * Reads `islocked` through a volatile-qualified pointer to force a fresh
 * load from memory rather than a cached register value.
 * NOTE(review): a volatile read is not a synchronizing operation; this is
 * only suitable for assertions/heuristics, which is how the file uses it.
 */
350 ak_inline
static int ak_spinlock_is_locked(ak_spinlock* p)
352 return *(
volatile ak_u32*)(&(p->islocked));
355 ak_inline
static void ak_spinlock_init(ak_spinlock* p)
360 ak_inline
static void ak_spinlock_acquire(ak_spinlock* p)
365 # define SPINS_PER_YIELD 63
366 #elif AKMALLOC_MACOS || AKMALLOC_IOS
367 # define SPINS_PER_YIELD 15
369 # define SPINS_PER_YIELD 31
372 if (ak_atomic_xchg(&(p->islocked), 1)) {
373 while (ak_atomic_xchg(&(p->islocked), 1)) {
374 if ((++spins & SPINS_PER_YIELD) == 0) {
375 #if AKMALLOC_MACOS || AKMALLOC_IOS
384 #undef SPINS_PER_YIELD
386 AKMALLOC_ASSERT(ak_spinlock_is_locked(p));
/*
 * Releases a spinlock previously taken by ak_spinlock_acquire().
 * Asserts the lock is actually held, then stores 0 via an atomic exchange.
 * NOTE(review): on GCC/Clang ak_atomic_xchg maps to __sync_lock_test_and_set,
 * which is documented as an *acquire* barrier; __sync_lock_release would be
 * the canonical release primitive — confirm the intended memory ordering.
 */
389 ak_inline
static void ak_spinlock_release(ak_spinlock* p)
391 AKMALLOC_ASSERT(ak_spinlock_is_locked(p));
392 ak_atomic_xchg(&(p->islocked), 0);
397 #define WIN32_LEAN_AND_MEAN
401 static void ak_os_sleep(ak_u32 micros)
403 SleepEx(micros / 1000, FALSE);
406 static void ak_spinlock_yield()
416 static void ak_os_sleep(ak_u32 micros)
421 static void ak_spinlock_yield()
434 typedef ak_u32 ak_bitset32;
/*
 * Returns 1 if every bit in the 32-bit bitset is set, else 0.
 */
436 ak_inline
static int ak_bitset_all(
const ak_bitset32* bs)
438 return (*bs == 0xFFFFFFFF) ? 1 : 0;
/*
 * Returns non-zero if any bit in the bitset is set.
 * Unlike ak_bitset_all()/ak_bitset_none(), this does NOT normalize to 0/1:
 * it returns the raw 32-bit pattern reinterpreted as int, so the result may
 * be negative (e.g. when bit 31 is set). Callers must treat it as a boolean.
 */
441 ak_inline
static int ak_bitset_any(
const ak_bitset32* bs)
443 return *(
const int*)bs;
/*
 * Returns 1 if no bit in the 32-bit bitset is set, else 0.
 */
446 ak_inline
static int ak_bitset_none(
const ak_bitset32* bs)
448 return (*bs == 0x00000000) ? 1 : 0;
451 ak_inline
static void ak_bitset_set_all(ak_bitset32* bs)
456 ak_inline
static void ak_bitset_clear_all(ak_bitset32* bs)
461 #define ak_bitset_set(bs, i) \
462 (*(bs) = (*(bs) | (0x00000001 << (i))))
464 #define ak_bitset_clear(bs, i) \
465 (*(bs) = (*(bs) & (~(0x00000001 << (i)))))
467 ak_inline
static void ak_bitset_set_n(ak_bitset32* bs,
int i,
int n)
469 ak_bitset32 mask = ~(0xFFFFFFFF << n);
470 *bs = *bs | (mask << i);
473 ak_inline
static void ak_bitset_clear_n(ak_bitset32* bs,
int i,
int n)
475 ak_bitset32 mask = ~(0xFFFFFFFF << n);
476 *bs = *bs & (~(mask << i));
479 #define ak_bitset_get(bs, i) \
480 ((*(bs) & (0x00000001 << (i))))
482 ak_inline
static ak_bitset32 ak_bitset_get_n(
const ak_bitset32* bs,
int i,
int n)
484 ak_bitset32 mask = ~(0xFFFFFFFF << n);
485 return (*bs & (mask << i)) >> i;
489 # define ak_bitset_fill_num_leading_zeros(bs, out) \
492 out = (_BitScanReverse(&ldz, *(bs))) ? (31 - ldz) : 32; \
495 # define ak_bitset_fill_num_leading_zeros(bs, out) \
496 out = (*bs) ? __builtin_clz(*bs) : 32
499 ak_inline
static int ak_bitset_num_leading_zeros(
const ak_bitset32* bs)
502 ak_bitset_fill_num_leading_zeros(bs, nlz);
507 # define ak_bitset_fill_num_trailing_zeros(bs, out) \
510 out = (_BitScanForward(&trz, *(bs))) ? trz : 32; \
513 # define ak_bitset_fill_num_trailing_zeros(bs, out) \
514 out = (*(bs)) ? __builtin_ctz(*(bs)) : 32
517 ak_inline
static int ak_bitset_num_trailing_zeros(
const ak_bitset32* bs)
520 ak_bitset_fill_num_trailing_zeros(bs, ntz);
524 ak_inline
static int ak_bitset_num_leading_ones(
const ak_bitset32* bs)
526 ak_bitset32 copy = ~(*bs);
527 return ak_bitset_num_leading_zeros(©);
530 ak_inline
static int ak_bitset_num_trailing_ones(
const ak_bitset32* bs)
532 ak_bitset32 copy = ~(*bs);
533 return ak_bitset_num_trailing_zeros(©);
536 ak_inline
static void ak_bitset_flip(ak_bitset32* bs)
543 struct ak_bitset512_tag
563 typedef struct ak_bitset512_tag ak_bitset512;
565 ak_inline
static int ak_bitset512_all(
const ak_bitset512* bs)
567 return ak_bitset_all(&(bs->a0)) &&
568 ak_bitset_all(&(bs->a1)) &&
569 ak_bitset_all(&(bs->a2)) &&
570 ak_bitset_all(&(bs->a3)) &&
571 ak_bitset_all(&(bs->a4)) &&
572 ak_bitset_all(&(bs->a5)) &&
573 ak_bitset_all(&(bs->a6)) &&
574 ak_bitset_all(&(bs->a7)) &&
575 ak_bitset_all(&(bs->a8)) &&
576 ak_bitset_all(&(bs->a9)) &&
577 ak_bitset_all(&(bs->a10)) &&
578 ak_bitset_all(&(bs->a11)) &&
579 ak_bitset_all(&(bs->a12)) &&
580 ak_bitset_all(&(bs->a13)) &&
581 ak_bitset_all(&(bs->a14)) &&
582 ak_bitset_all(&(bs->a15));
585 ak_inline
static int ak_bitset512_any(
const ak_bitset512* bs)
587 return ak_bitset_any(&(bs->a0)) ||
588 ak_bitset_any(&(bs->a1)) ||
589 ak_bitset_any(&(bs->a2)) ||
590 ak_bitset_any(&(bs->a3)) ||
591 ak_bitset_any(&(bs->a4)) ||
592 ak_bitset_any(&(bs->a5)) ||
593 ak_bitset_any(&(bs->a6)) ||
594 ak_bitset_any(&(bs->a7)) ||
595 ak_bitset_any(&(bs->a8)) ||
596 ak_bitset_any(&(bs->a9)) ||
597 ak_bitset_any(&(bs->a10)) ||
598 ak_bitset_any(&(bs->a11)) ||
599 ak_bitset_any(&(bs->a12)) ||
600 ak_bitset_any(&(bs->a13)) ||
601 ak_bitset_any(&(bs->a14)) ||
602 ak_bitset_any(&(bs->a15));
605 ak_inline
static int ak_bitset512_none(
const ak_bitset512* bs)
607 return ak_bitset_none(&(bs->a0)) &&
608 ak_bitset_none(&(bs->a1)) &&
609 ak_bitset_none(&(bs->a2)) &&
610 ak_bitset_none(&(bs->a3)) &&
611 ak_bitset_none(&(bs->a4)) &&
612 ak_bitset_none(&(bs->a5)) &&
613 ak_bitset_none(&(bs->a6)) &&
614 ak_bitset_none(&(bs->a7)) &&
615 ak_bitset_none(&(bs->a8)) &&
616 ak_bitset_none(&(bs->a9)) &&
617 ak_bitset_none(&(bs->a10)) &&
618 ak_bitset_none(&(bs->a11)) &&
619 ak_bitset_none(&(bs->a12)) &&
620 ak_bitset_none(&(bs->a13)) &&
621 ak_bitset_none(&(bs->a14)) &&
622 ak_bitset_none(&(bs->a15));
625 ak_inline
static void ak_bitset512_set_all(ak_bitset512* bs)
627 ak_bitset_set_all(&(bs->a0));
628 ak_bitset_set_all(&(bs->a1));
629 ak_bitset_set_all(&(bs->a2));
630 ak_bitset_set_all(&(bs->a3));
631 ak_bitset_set_all(&(bs->a4));
632 ak_bitset_set_all(&(bs->a5));
633 ak_bitset_set_all(&(bs->a6));
634 ak_bitset_set_all(&(bs->a7));
635 ak_bitset_set_all(&(bs->a8));
636 ak_bitset_set_all(&(bs->a9));
637 ak_bitset_set_all(&(bs->a10));
638 ak_bitset_set_all(&(bs->a11));
639 ak_bitset_set_all(&(bs->a12));
640 ak_bitset_set_all(&(bs->a13));
641 ak_bitset_set_all(&(bs->a14));
642 ak_bitset_set_all(&(bs->a15));
645 ak_inline
static void ak_bitset512_clear_all(ak_bitset512* bs)
647 ak_bitset_clear_all(&(bs->a0));
648 ak_bitset_clear_all(&(bs->a1));
649 ak_bitset_clear_all(&(bs->a2));
650 ak_bitset_clear_all(&(bs->a3));
651 ak_bitset_clear_all(&(bs->a4));
652 ak_bitset_clear_all(&(bs->a5));
653 ak_bitset_clear_all(&(bs->a6));
654 ak_bitset_clear_all(&(bs->a7));
655 ak_bitset_clear_all(&(bs->a8));
656 ak_bitset_clear_all(&(bs->a9));
657 ak_bitset_clear_all(&(bs->a10));
658 ak_bitset_clear_all(&(bs->a11));
659 ak_bitset_clear_all(&(bs->a12));
660 ak_bitset_clear_all(&(bs->a13));
661 ak_bitset_clear_all(&(bs->a14));
662 ak_bitset_clear_all(&(bs->a15));
665 ak_inline
static void ak_bitset512_set(ak_bitset512* bs,
int idx)
669 ak_bitset_set(&(bs->a15), idx & 31);
672 ak_bitset_set(&(bs->a14), idx & 31);
675 ak_bitset_set(&(bs->a13), idx & 31);
678 ak_bitset_set(&(bs->a12), idx & 31);
681 ak_bitset_set(&(bs->a11), idx & 31);
684 ak_bitset_set(&(bs->a10), idx & 31);
687 ak_bitset_set(&(bs->a9), idx & 31);
690 ak_bitset_set(&(bs->a8), idx & 31);
693 ak_bitset_set(&(bs->a7), idx & 31);
696 ak_bitset_set(&(bs->a6), idx & 31);
699 ak_bitset_set(&(bs->a5), idx & 31);
702 ak_bitset_set(&(bs->a4), idx & 31);
705 ak_bitset_set(&(bs->a3), idx & 31);
708 ak_bitset_set(&(bs->a2), idx & 31);
711 ak_bitset_set(&(bs->a1), idx & 31);
714 ak_bitset_set(&(bs->a0), idx & 31);
717 AKMALLOC_ASSERT_ALWAYS(0 &&
"Invalid bitset index");
722 ak_inline
static void ak_bitset512_clear(ak_bitset512* bs,
int idx)
726 ak_bitset_clear(&(bs->a15), idx & 31);
729 ak_bitset_clear(&(bs->a14), idx & 31);
732 ak_bitset_clear(&(bs->a13), idx & 31);
735 ak_bitset_clear(&(bs->a12), idx & 31);
738 ak_bitset_clear(&(bs->a11), idx & 31);
741 ak_bitset_clear(&(bs->a10), idx & 31);
744 ak_bitset_clear(&(bs->a9), idx & 31);
747 ak_bitset_clear(&(bs->a8), idx & 31);
750 ak_bitset_clear(&(bs->a7), idx & 31);
753 ak_bitset_clear(&(bs->a6), idx & 31);
756 ak_bitset_clear(&(bs->a5), idx & 31);
759 ak_bitset_clear(&(bs->a4), idx & 31);
762 ak_bitset_clear(&(bs->a3), idx & 31);
765 ak_bitset_clear(&(bs->a2), idx & 31);
768 ak_bitset_clear(&(bs->a1), idx & 31);
771 ak_bitset_clear(&(bs->a0), idx & 31);
774 AKMALLOC_ASSERT_ALWAYS(0 &&
"Invalid bitset index");
779 ak_inline
static ak_bitset32 ak_bitset512_get(
const ak_bitset512* bs,
int idx)
783 return ak_bitset_get(&(bs->a15), idx & 31);
785 return ak_bitset_get(&(bs->a14), idx & 31);
787 return ak_bitset_get(&(bs->a13), idx & 31);
789 return ak_bitset_get(&(bs->a12), idx & 31);
791 return ak_bitset_get(&(bs->a11), idx & 31);
793 return ak_bitset_get(&(bs->a10), idx & 31);
795 return ak_bitset_get(&(bs->a9), idx & 31);
797 return ak_bitset_get(&(bs->a8), idx & 31);
799 return ak_bitset_get(&(bs->a7), idx & 31);
801 return ak_bitset_get(&(bs->a6), idx & 31);
803 return ak_bitset_get(&(bs->a5), idx & 31);
805 return ak_bitset_get(&(bs->a4), idx & 31);
807 return ak_bitset_get(&(bs->a3), idx & 31);
809 return ak_bitset_get(&(bs->a2), idx & 31);
811 return ak_bitset_get(&(bs->a1), idx & 31);
813 return ak_bitset_get(&(bs->a0), idx & 31);
815 AKMALLOC_ASSERT_ALWAYS(0 &&
"Invalid bitset index");
820 #define ak_bitset512_fill_num_leading_zeros(bs, nlz) \
824 ak_bitset_fill_num_leading_zeros(&(bs->a0), cur); \
827 ak_bitset_fill_num_leading_zeros(&(bs->a1), cur); \
830 ak_bitset_fill_num_leading_zeros(&(bs->a2), cur); \
833 ak_bitset_fill_num_leading_zeros(&(bs->a3), cur); \
836 ak_bitset_fill_num_leading_zeros(&(bs->a4), cur); \
839 ak_bitset_fill_num_leading_zeros(&(bs->a5), cur); \
842 ak_bitset_fill_num_leading_zeros(&(bs->a6), cur); \
845 ak_bitset_fill_num_leading_zeros(&(bs->a7), cur); \
848 ak_bitset_fill_num_leading_zeros(&(bs->a8), cur); \
851 ak_bitset_fill_num_leading_zeros(&(bs->a9), cur); \
854 ak_bitset_fill_num_leading_zeros(&(bs->a10), cur); \
857 ak_bitset_fill_num_leading_zeros(&(bs->a11), cur); \
860 ak_bitset_fill_num_leading_zeros(&(bs->a12), cur); \
863 ak_bitset_fill_num_leading_zeros(&(bs->a13), cur); \
866 ak_bitset_fill_num_leading_zeros(&(bs->a14), cur); \
869 ak_bitset_fill_num_leading_zeros(&(bs->a15), cur); \
871 } } } } } } } } } } } } } } } \
874 #define ak_bitset512_fill_num_leading_ones(bs, nlz) \
880 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
884 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
888 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
892 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
896 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
900 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
904 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
908 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
912 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
916 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
920 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
924 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
928 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
932 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
936 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
940 ak_bitset_fill_num_leading_zeros(&mybs, cur); \
942 } } } } } } } } } } } } } } } \
945 ak_inline
static int ak_bitset512_num_leading_zeros(
const ak_bitset512* bs)
948 ak_bitset512_fill_num_leading_zeros(bs, nlz);
952 #define ak_bitset512_fill_num_trailing_zeros(bs, ntz) \
956 ak_bitset_fill_num_trailing_zeros(&(bs->a15), cur); \
959 ak_bitset_fill_num_trailing_zeros(&(bs->a14), cur); \
962 ak_bitset_fill_num_trailing_zeros(&(bs->a13), cur); \
965 ak_bitset_fill_num_trailing_zeros(&(bs->a12), cur); \
968 ak_bitset_fill_num_trailing_zeros(&(bs->a11), cur); \
971 ak_bitset_fill_num_trailing_zeros(&(bs->a10), cur); \
974 ak_bitset_fill_num_trailing_zeros(&(bs->a9), cur); \
977 ak_bitset_fill_num_trailing_zeros(&(bs->a8), cur); \
980 ak_bitset_fill_num_trailing_zeros(&(bs->a7), cur); \
983 ak_bitset_fill_num_trailing_zeros(&(bs->a6), cur); \
986 ak_bitset_fill_num_trailing_zeros(&(bs->a5), cur); \
989 ak_bitset_fill_num_trailing_zeros(&(bs->a4), cur); \
992 ak_bitset_fill_num_trailing_zeros(&(bs->a3), cur); \
995 ak_bitset_fill_num_trailing_zeros(&(bs->a2), cur); \
998 ak_bitset_fill_num_trailing_zeros(&(bs->a1), cur); \
1001 ak_bitset_fill_num_trailing_zeros(&(bs->a0), cur); \
1003 } } } } } } } } } } } } } } } \
1006 #define ak_bitset512_fill_num_trailing_ones(bs, ntz) \
1011 mybs = ~(bs->a15); \
1012 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1015 mybs = ~(bs->a14); \
1016 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1019 mybs = ~(bs->a13); \
1020 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1023 mybs = ~(bs->a12); \
1024 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1027 mybs = ~(bs->a11); \
1028 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1031 mybs = ~(bs->a10); \
1032 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1036 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1040 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1044 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1048 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1052 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1056 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1060 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1064 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1068 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1072 ak_bitset_fill_num_trailing_zeros(&mybs, cur); \
1074 } } } } } } } } } } } } } } } \
1078 ak_inline
static int ak_bitset512_num_trailing_zeros(
const ak_bitset512* bs)
1081 ak_bitset512_fill_num_trailing_zeros(bs, ntz);
1085 ak_inline
static void ak_bitset512_flip(ak_bitset512* bs)
1087 ak_bitset_flip(&(bs->a0));
1088 ak_bitset_flip(&(bs->a1));
1089 ak_bitset_flip(&(bs->a2));
1090 ak_bitset_flip(&(bs->a3));
1091 ak_bitset_flip(&(bs->a4));
1092 ak_bitset_flip(&(bs->a5));
1093 ak_bitset_flip(&(bs->a6));
1094 ak_bitset_flip(&(bs->a7));
1095 ak_bitset_flip(&(bs->a8));
1096 ak_bitset_flip(&(bs->a9));
1097 ak_bitset_flip(&(bs->a10));
1098 ak_bitset_flip(&(bs->a11));
1099 ak_bitset_flip(&(bs->a12));
1100 ak_bitset_flip(&(bs->a13));
1101 ak_bitset_flip(&(bs->a14));
1102 ak_bitset_flip(&(bs->a15));
1105 ak_inline
static int ak_bitset512_num_leading_ones(
const ak_bitset512* bs)
1108 ak_bitset512_fill_num_leading_ones(bs, nlo);
1112 ak_inline
static int ak_bitset512_num_trailing_ones(
const ak_bitset512* bs)
1115 ak_bitset512_fill_num_trailing_ones(bs, nto);
1121 #if defined(AKMALLOC_GETPAGESIZE)
1122 # error "Page size can only be set to an internal default."
1124 # define AKMALLOC_GETPAGESIZE AKMALLOC_DEFAULT_GETPAGESIZE
1127 #if !defined(AKMALLOC_MMAP) && !defined(AKMALLOC_MUNMAP)
1128 # define AKMALLOC_MMAP AKMALLOC_DEFAULT_MMAP
1129 # define AKMALLOC_MUNMAP AKMALLOC_DEFAULT_MUNMAP
1130 #elif defined(AKMALLOC_MMAP) && defined(AKMALLOC_MUNMAP)
1133 # error "AKMALLOC_MMAP and AKMALLOC_MUNMAP not defined simultaneously."
1140 #if AKMALLOC_WINDOWS
1142 #define WIN32_LEAN_AND_MEAN
1144 #include <Windows.h>
1146 ak_inline
static ak_sz ak_page_size()
1148 static ak_sz PGSZ = 0;
1149 if (ak_unlikely(!PGSZ)) {
1152 PGSZ = si.dwPageSize;
1157 ak_inline
static void* ak_mmap(ak_sz s)
1159 return VirtualAlloc(0, s, MEM_RESERVE|MEM_COMMIT, PAGE_READWRITE);
1162 ak_inline
static void ak_munmap(
void* p, ak_sz s)
1164 (void)VirtualFree(p, s, MEM_RELEASE);
1169 #include <sys/mman.h>
1171 ak_inline
static void* ak_mmap(ak_sz s)
1173 void* addr = mmap(0, s, PROT_READ|PROT_WRITE, MAP_PRIVATE|MAP_ANON, -1, 0);
1174 return (addr == (
void*)AK_SZ_MAX) ? 0 : addr;
1177 ak_inline
static void ak_munmap(
void* p, ak_sz s)
1184 ak_inline
static ak_sz ak_page_size()
1186 static ak_sz pgsz = 0;
1187 if (ak_unlikely(!pgsz)) {
1188 pgsz = sysconf(_SC_PAGESIZE);
1195 #define AKMALLOC_DEFAULT_GETPAGESIZE ak_page_size
1196 #define AKMALLOC_DEFAULT_MMAP(s) ak_mmap((s))
1197 #define AKMALLOC_DEFAULT_MUNMAP(a, s) ak_munmap((a), (s))
1199 static void* ak_os_alloc(
size_t sz)
1201 static const ak_sz pgsz = AKMALLOC_DEFAULT_PAGE_SIZE;
1203 AKMALLOC_ASSERT_ALWAYS(pgsz == ak_page_size());
1204 void* mem = AKMALLOC_MMAP(sz);
1205 DBG_PRINTF(
"osmap,%p,%zu,%zu pages,iswhole %d\n", mem, sz, sz/AKMALLOC_DEFAULT_PAGE_SIZE, sz == AKMALLOC_DEFAULT_PAGE_SIZE*(sz/AKMALLOC_DEFAULT_PAGE_SIZE));
1209 static void ak_os_free(
void* p,
size_t sz)
1211 DBG_PRINTF(
"osunmap,%p,%zu\n", p, sz);
1212 AKMALLOC_MUNMAP(p, sz);
1215 ak_inline
static void* ak_page_start_before(
void* p)
1217 return (
void*)((ak_sz)p & (~(ak_sz)(AKMALLOC_DEFAULT_PAGE_SIZE - 1)));
1220 ak_inline
static const void* ak_page_start_before_const(
const void* p)
1222 return (
void*)((ak_sz)p & (~(ak_sz)(AKMALLOC_DEFAULT_PAGE_SIZE - 1)));
1231 #if !defined(AKMALLOC_USE_LOCKS) || AKMALLOC_USE_LOCKS
1232 # define AK_MALLOCSTATE_USE_LOCKS
1235 #if defined(AK_MALLOCSTATE_USE_LOCKS)
1236 # define AK_SLAB_USE_LOCKS
1237 # define AK_CA_USE_LOCKS
1238 # define AKMALLOC_LOCK_DEFINE(nm) ak_spinlock nm
1239 # define AKMALLOC_LOCK_INIT(lk) ak_spinlock_init((lk))
1240 # define AKMALLOC_LOCK_ACQUIRE(lk) ak_spinlock_acquire((lk))
1241 # define AKMALLOC_LOCK_RELEASE(lk) ak_spinlock_release((lk))
1243 # define AKMALLOC_LOCK_DEFINE(nm)
1244 # define AKMALLOC_LOCK_INIT(lk)
1245 # define AKMALLOC_LOCK_ACQUIRE(lk)
1246 # define AKMALLOC_LOCK_RELEASE(lk)
1249 #if !defined(AK_COALESCE_SEGMENT_GRANULARITY)
1250 # define AK_COALESCE_SEGMENT_GRANULARITY (((size_t)1) << 18)
1253 #if !defined(AK_SEG_CBK_DEFINED)
1312 typedef struct ak_slab_tag ak_slab;
1316 #if defined(AK_SLAB_USE_LOCKS)
1317 # define AK_SLAB_LOCK_DEFINE(nm) ak_spinlock nm
1318 # define AK_SLAB_LOCK_INIT(root) ak_spinlock_init(ak_as_ptr((root)->LOCKED))
1319 # define AK_SLAB_LOCK_ACQUIRE(root) ak_spinlock_acquire(ak_as_ptr((root)->LOCKED))
1320 # define AK_SLAB_LOCK_RELEASE(root) ak_spinlock_release(ak_as_ptr((root)->LOCKED))
1322 # define AK_SLAB_LOCK_DEFINE(nm)
1323 # define AK_SLAB_LOCK_INIT(root)
1324 # define AK_SLAB_LOCK_ACQUIRE(root)
1325 # define AK_SLAB_LOCK_RELEASE(root)
1358 #if !defined(AK_SLAB_RELEASE_RATE)
1359 # define AK_SLAB_RELEASE_RATE 127
1362 #if !defined(AK_SLAB_MAX_PAGES_TO_FREE)
1363 # define AK_SLAB_MAX_PAGES_TO_FREE AK_SLAB_RELEASE_RATE
1370 #define ak_slab_unlink(slab) \
1372 ak_slab* const sU = (slab); \
1373 sU->bk->fd = (sU->fd); \
1374 sU->fd->bk = (sU->bk); \
1375 sU->fd = sU->bk = AK_NULLPTR; \
1378 #define ak_slab_link_fd(slab, fwd) \
1380 ak_slab* const sLF = (slab); \
1381 ak_slab* const fLF = (fwd); \
1386 #define ak_slab_link_bk(slab, back) \
1388 ak_slab* const sLB = (slab); \
1389 ak_slab* const bLB = (back); \
1394 #define ak_slab_link(slab, fwd, back) \
1396 ak_slab* const sL = (slab); \
1397 ak_slab* const fL = (fwd); \
1398 ak_slab* const bL = (back); \
1399 ak_slab_link_bk(sL, bL); \
1400 ak_slab_link_fd(sL, fL); \
1403 ak_inline
static void ak_slab_init_chain_head(ak_slab* s, ak_slab_root* rootp)
1407 ak_bitset512_clear_all(&(s->avail));
1410 ak_inline
static ak_sz ak_num_pages_for_sz(ak_sz sz)
1415 #define ak_slab_init(m, s, av, r) \
1417 void* slabmem = (m); \
1418 ak_sz slabsz = (s); \
1419 ak_sz slabnavail = (av); \
1420 ak_slab_root* slabroot = (r); \
1422 AKMALLOC_ASSERT(slabmem); \
1423 AKMALLOC_ASSERT(slabsz < (AKMALLOC_DEFAULT_PAGE_SIZE - sizeof(ak_slab))); \
1424 AKMALLOC_ASSERT(slabsz > 0); \
1425 AKMALLOC_ASSERT(slabsz % 2 == 0); \
1427 AKMALLOC_ASSERT(slabnavail < 512); \
1428 AKMALLOC_ASSERT(slabnavail > 0); \
1430 ak_slab* s = (ak_slab*)slabmem; \
1431 s->fd = s->bk = AK_NULLPTR; \
1432 s->root = slabroot; \
1433 ak_bitset512_clear_all(&(s->avail)); \
1434 int inavail = (int)slabnavail; \
1435 for (int i = 0; i < inavail; ++i) { \
1436 ak_bitset512_set(&(s->avail), i); \
1441 ak_inline
static ak_slab* ak_slab_new_init(
char* mem, ak_sz sz, ak_sz navail, ak_slab* fd, ak_slab* bk, ak_slab_root* root)
1443 ak_slab_init(mem, sz, navail, root);
1444 ak_slab* slab = ak_ptr_cast(ak_slab, mem);
1445 ak_slab_link(slab, fd, bk);
1449 static ak_slab* ak_slab_new_alloc(ak_sz sz, ak_slab* fd, ak_slab* bk, ak_slab_root* root)
1451 const int NPAGES = root->npages;
1454 char*
const mem = (
char*)ak_os_alloc(NPAGES * AKMALLOC_DEFAULT_PAGE_SIZE);
1456 if (ak_unlikely(!mem)) {
return AK_NULLPTR; }
1459 ak_sz navail = root->navail;
1462 for (
int i = 0; i < NPAGES - 1; ++i) {
1463 ak_slab* nextpage = ak_ptr_cast(ak_slab, (cmem + AKMALLOC_DEFAULT_PAGE_SIZE));
1464 ak_slab* curr = ak_slab_new_init(cmem, sz, navail, nextpage, bk, root);
1465 AKMALLOC_ASSERT(ak_bitset512_num_trailing_ones(&(curr->avail)) == (
int)navail);
1468 cmem += AKMALLOC_DEFAULT_PAGE_SIZE;
1471 ak_slab_new_init(cmem, sz, navail, fd, bk, root);
1473 return ak_ptr_cast(ak_slab, mem);
1476 static ak_slab* ak_slab_new_reuse(ak_sz sz, ak_slab* fd, ak_slab* bk, ak_slab_root* root)
1478 AKMALLOC_ASSERT(root->nempty >= 1);
1480 ak_sz navail = root->navail;
1481 ak_slab*
const curr = root->empty_root.fd;
1482 ak_slab_unlink(curr);
1483 ak_slab_new_init((
char*)curr, sz, navail, fd, bk, root);
/*
 * Obtains a fresh slab page linked between `fd` and `bk`: reuses a page
 * from the root's empty list when one is available (avoids an OS call),
 * otherwise allocates new page(s) from the OS.
 * Returns AK_NULLPTR only on the allocation path when the OS is out of
 * memory (ak_slab_new_alloc returns AK_NULLPTR on mmap failure).
 */
1490 ak_inline
static ak_slab* ak_slab_new(ak_sz sz, ak_slab* fd, ak_slab* bk, ak_slab_root* root)
1492 return (root->nempty > 0)
1493 ? ak_slab_new_reuse(sz, fd, bk, root)
1494 : ak_slab_new_alloc(sz, fd, bk, root);
1497 #define ak_slab_2_mem(s) (char*)(void*)((s) + 1)
1499 ak_inline
static int ak_slab_all_free(ak_slab* s)
1501 const ak_bitset512* pavail = &(s->avail);
1503 ak_bitset512_fill_num_trailing_ones(pavail, nto);
1504 return nto == s->root->navail;
1507 ak_inline
static int ak_slab_none_free(ak_slab* s)
1509 const ak_bitset512* pavail = &(s->avail);
1511 ak_bitset512_fill_num_trailing_zeros(pavail, ntz);
1515 static void* ak_slab_search(ak_slab* s, ak_sz sz, ak_u32 navail, ak_slab** pslab,
int* pntz)
1517 const ak_slab*
const root = s;
1518 void* mem = AK_NULLPTR;
1519 if (ak_likely(s->fd != root)) {
1520 AKMALLOC_ASSERT(pslab);
1521 AKMALLOC_ASSERT(pntz);
1525 AKMALLOC_ASSERT(ak_bitset512_num_trailing_zeros(&(s->avail)) != 512);
1527 const ak_bitset512* pavail = &(s->avail);
1529 ak_bitset512_fill_num_trailing_zeros(pavail, ntz);
1531 AKMALLOC_ASSERT(ak_bitset512_get(&(s->avail), ntz));
1532 ak_bitset512_clear(&(s->avail), ntz);
1533 mem = ak_slab_2_mem(s) + (ntz * sz);
1536 if (ntz == (
int)navail - 1) {
1539 ak_bitset512_fill_num_trailing_zeros(pavail, ntz);
1546 static void ak_slab_release_pages(ak_slab_root* root, ak_slab* s, ak_u32 numtofree)
1548 ak_slab*
const r = s;
1549 ak_slab* next = AK_NULLPTR;
1551 for (ak_u32 ct = 0; ct < numtofree; ++ct) {
1558 ak_os_free(s, AKMALLOC_DEFAULT_PAGE_SIZE);
1563 ak_inline
static void ak_slab_release_os_mem(ak_slab_root* root)
1565 ak_u32 numtofree = root->nempty;
1566 numtofree = (numtofree > root->MAX_PAGES_TO_FREE)
1567 ? root->MAX_PAGES_TO_FREE
1569 ak_slab_release_pages(root, &(root->empty_root), numtofree);
1570 root->nempty -= numtofree;
1586 static void ak_slab_init_root(ak_slab_root* s, ak_sz sz, ak_u32 npages, ak_u32 relrate, ak_u32 maxpagefree)
1589 s->navail = (ak_u32)(AKMALLOC_DEFAULT_PAGE_SIZE -
sizeof(ak_slab))/(ak_u32)sz;
1594 ak_slab_init_chain_head(&(s->partial_root), s);
1595 ak_slab_init_chain_head(&(s->full_root), s);
1596 ak_slab_init_chain_head(&(s->empty_root), s);
1598 s->RELEASE_RATE = relrate;
1599 s->MAX_PAGES_TO_FREE = maxpagefree;
1600 AK_SLAB_LOCK_INIT(s);
1610 ak_slab_init_root(s, sz, (ak_u32)ak_num_pages_for_sz(sz), (ak_u32)(AK_SLAB_RELEASE_RATE), (ak_u32)(AK_SLAB_MAX_PAGES_TO_FREE));
1622 ak_slab* slab = AK_NULLPTR;
1624 AK_SLAB_LOCK_ACQUIRE(root);
1625 const ak_sz sz = root->sz;
1627 void* mem = ak_slab_search(&(root->partial_root), sz, root->navail, &slab, &ntz);
1629 if (ak_unlikely(!mem)) {
1630 slab = ak_slab_new(sz, root->partial_root.fd, &(root->partial_root), root);
1631 if (ak_likely(slab)) {
1632 AKMALLOC_ASSERT(ak_bitset512_get(&(slab->avail), 0));
1633 ak_bitset512_clear(&(slab->avail), 0);
1634 mem = ak_slab_2_mem(slab);
1636 }
else if (ak_unlikely(ntz == 512)) {
1637 ak_slab_unlink(slab);
1638 ak_slab_link(slab, root->full_root.fd, &(root->full_root));
1641 AK_SLAB_LOCK_RELEASE(root);
1652 char* mem = (
char*)p;
1655 ak_slab* slab = (ak_slab*)(ak_page_start_before(p));
1656 AKMALLOC_ASSERT(slab->root);
1658 ak_slab_root* root = slab->root;
1659 AK_SLAB_LOCK_ACQUIRE(root);
1661 int movetopartial = ak_slab_none_free(slab);
1662 const ak_sz sz = root->sz;
1664 int idx = (int)(mem - (
char*)ak_slab_2_mem(slab))/(
int)sz;
1665 AKMALLOC_ASSERT(!ak_bitset512_get(&(slab->avail), idx));
1666 ak_bitset512_set(&(slab->avail), idx);
1668 if (ak_unlikely(movetopartial)) {
1671 ak_slab_unlink(slab);
1672 ak_slab_link(slab, &(root->partial_root), root->partial_root.bk);
1673 }
else if (ak_unlikely(ak_slab_all_free(slab))) {
1674 ak_slab_unlink(slab);
1675 ak_slab_link(slab, root->empty_root.fd, &(root->empty_root));
1676 ++(root->nempty); ++(root->release);
1677 if (root->release >= root->RELEASE_RATE) {
1678 ak_slab_release_os_mem(root);
1682 AK_SLAB_LOCK_RELEASE(root);
1691 ak_slab_release_pages(root, &(root->empty_root), AK_U32_MAX);
1692 ak_slab_release_pages(root, &(root->partial_root), AK_U32_MAX);
1693 ak_slab_release_pages(root, &(root->full_root), AK_U32_MAX);
1761 #define AK_COALESCE_ALIGN 16
1763 #if !defined(AK_COALESCE_SEGMENT_GRANULARITY)
1764 # define AK_COALESCE_SEGMENT_GRANULARITY 65536
1767 #if !defined(AK_COALESCE_SEGMENT_SIZE)
1769 # define AK_COALESCE_SEGMENT_SIZE AK_COALESCE_SEGMENT_GRANULARITY
1772 #if defined(AK_CA_USE_LOCKS)
1773 # define AK_CA_LOCK_DEFINE(nm) ak_spinlock nm
1774 # define AK_CA_LOCK_INIT(root) ak_spinlock_init(ak_as_ptr((root)->LOCKED))
1775 # define AK_CA_LOCK_ACQUIRE(root) ak_spinlock_acquire(ak_as_ptr((root)->LOCKED))
1776 # define AK_CA_LOCK_RELEASE(root) ak_spinlock_release(ak_as_ptr((root)->LOCKED))
1778 # define AK_CA_LOCK_DEFINE(nm)
1779 # define AK_CA_LOCK_INIT(root)
1780 # define AK_CA_LOCK_ACQUIRE(root)
1781 # define AK_CA_LOCK_RELEASE(root)
1784 typedef ak_sz ak_alloc_info;
1786 typedef struct ak_alloc_node_tag ak_alloc_node;
1788 typedef struct ak_free_list_node_tag ak_free_list_node;
1790 typedef struct ak_ca_segment_tag ak_ca_segment;
1794 struct ak_alloc_node_tag
1796 #if AKMALLOC_BITNESS == 32
1797 ak_alloc_info _unused0;
1798 ak_alloc_info _unused1;
1800 ak_alloc_info previnfo;
1801 ak_alloc_info currinfo;
1804 struct ak_free_list_node_tag
1806 ak_free_list_node* bk;
1807 ak_free_list_node* fd;
1810 struct ak_ca_segment_tag
1815 ak_alloc_node* head;
1843 #define ak_ca_to_sz(p) (((ak_sz)(p)) & ~(AK_COALESCE_ALIGN - 1))
1845 #define ak_ca_is_first(p) (((ak_sz)(p)) & (AK_SZ_ONE << 0))
1847 #define ak_ca_is_last(p) (((ak_sz)(p)) & (AK_SZ_ONE << 1))
1849 #define ak_ca_is_free(p) (((ak_sz)(p)) & (AK_SZ_ONE << 2))
1851 ak_inline
static void ak_ca_set_sz(ak_alloc_info* p, ak_sz sz)
1853 AKMALLOC_ASSERT(sz == ak_ca_to_sz((ak_alloc_info)sz));
1855 *p = (ak_alloc_info)((((ak_sz)*p) & ((ak_sz)7)) |
1859 ak_inline
static void ak_ca_set_is_first(ak_alloc_info* p,
int v)
1861 *p = (ak_alloc_info)((((ak_sz)*p) & ~(AK_SZ_ONE << 0)) | (v ? (AK_SZ_ONE << 0) : 0));
1864 ak_inline
static void ak_ca_set_is_last(ak_alloc_info* p,
int v)
1866 *p = (ak_alloc_info)((((ak_sz)*p) & ~(AK_SZ_ONE << 1)) | (v ? (AK_SZ_ONE << 1) : 0));
1869 ak_inline
static void ak_ca_set_is_free(ak_alloc_info* p,
int v)
1871 *p = (ak_alloc_info)((((ak_sz)*p) & ~(AK_SZ_ONE << 2)) | (v ? (AK_SZ_ONE << 2) : 0));
1874 ak_inline
/* Next chunk in address order: payload start + current size. For the last
 * chunk of a segment there is no next node.
 * NOTE(review): the "?" arm of the conditional (presumably AK_NULLPTR) is
 * missing from this excerpt. */
static ak_alloc_node* ak_ca_next_node(ak_alloc_node* node)
1876 return ak_ca_is_last(node->currinfo)
1878 : ak_ptr_cast(ak_alloc_node,((
char*)(node + 1) + ak_ca_to_sz(node->currinfo)));
1881 ak_inline
/* Previous chunk in address order, located via previnfo (the footer copy of
 * the previous chunk's tag). No previous node for a segment's first chunk.
 * NOTE(review): the "?" arm is missing from this excerpt, as above. */
static ak_alloc_node* ak_ca_prev_node(ak_alloc_node* node)
1883 return ak_ca_is_first(node->currinfo)
1885 : ak_ptr_cast(ak_alloc_node, ((
char*)(node - 1) - ak_ca_to_sz(node->previnfo)));
1888 ak_inline
/* Propagate p's tag into the next chunk's previnfo "footer".
 * NOTE(review): original line 1891 is missing here -- presumably a null
 * check on n, since ak_ca_next_node can return no node for the last chunk. */
static void ak_ca_update_footer(ak_alloc_node* p)
1890 ak_alloc_node* n = ak_ca_next_node(p);
1892 n->previnfo = p->currinfo;
/* Unlink a node from its circular doubly-linked free list and null its
 * links. Callers must not pass a node that is not linked. */
1896 #define ak_free_list_node_unlink(node) \
1898 ak_free_list_node* const sU = (node); \
1899 sU->bk->fd = (sU->fd); \
1900 sU->fd->bk = (sU->bk); \
1901 sU->fd = sU->bk = AK_NULLPTR; \
/* Link node in front of fwd (body lines missing from this excerpt). */
1904 #define ak_free_list_node_link_fd(node, fwd) \
1906 ak_free_list_node* const sLF = (node); \
1907 ak_free_list_node* const fLF = (fwd); \
/* Link node behind back (body lines missing from this excerpt). */
1912 #define ak_free_list_node_link_bk(node, back) \
1914 ak_free_list_node* const sLB = (node); \
1915 ak_free_list_node* const bLB = (back); \
/* Insert node between back and fwd in one step. */
1920 #define ak_free_list_node_link(node, fwd, back) \
1922 ak_free_list_node* const sL = (node); \
1923 ak_free_list_node* const fL = (fwd); \
1924 ak_free_list_node* const bL = (back); \
1925 ak_free_list_node_link_bk(sL, bL); \
1926 ak_free_list_node_link_fd(sL, fL); \
/* Segment-list analogues of the free-list link/unlink macros above. */
1929 #define ak_ca_segment_unlink(node) \
1931 ak_ca_segment* const sU = (node); \
1932 sU->bk->fd = (sU->fd); \
1933 sU->fd->bk = (sU->bk); \
1934 sU->fd = sU->bk = AK_NULLPTR; \
1937 #define ak_ca_segment_link_fd(node, fwd) \
1939 ak_ca_segment* const sLF = (node); \
1940 ak_ca_segment* const fLF = (fwd); \
1945 #define ak_ca_segment_link_bk(node, back) \
1947 ak_ca_segment* const sLB = (node); \
1948 ak_ca_segment* const bLB = (back); \
1953 #define ak_ca_segment_link(node, fwd, back) \
1955 ak_ca_segment* const sL = (node); \
1956 ak_ca_segment* const fL = (fwd); \
1957 ak_ca_segment* const bL = (back); \
1958 ak_ca_segment_link_bk(sL, bL); \
1959 ak_ca_segment_link_fd(sL, fL); \
/* Iterate a circular list starting at (list)->fd; the sentinel "list" node
 * itself is never visited. "name" is declared by the macro. */
1962 #define ak_circ_list_for_each(type, name, list) \
1963 type* name = (list)->fd; \
1964 for(type* const iterroot = (list); name != iterroot; name = name->fd)
/* Round x up to AK_COALESCE_ALIGN; zero maps to one alignment unit. */
1966 #define ak_ca_aligned_size(x) ((x) ? (((x) + AK_COALESCE_ALIGN - 1) & ~(AK_COALESCE_ALIGN - 1)) : AK_COALESCE_ALIGN)
/* Round x up to the segment-size granularity (power of two, asserted in
 * ak_ca_init_root). */
1968 #define ak_ca_aligned_segment_size(x) (((x) + (AK_COALESCE_SEGMENT_SIZE) - 1) & ~((AK_COALESCE_SEGMENT_SIZE) - 1))
1970 ak_inline
/* First-fit search of the free list for a chunk of at least sz bytes
 * (sz already AK_COALESCE_ALIGN-aligned). If the found chunk exceeds the
 * request by more than splitsz it is split: the front part is returned
 * allocated and the remainder is re-linked as a free chunk.
 * NOTE(review): several original lines (returns, brace closers, the
 * exact-fit path's unlink/return) are missing from this excerpt. */
static void* ak_ca_search_free_list(ak_free_list_node* root, ak_sz sz, ak_sz splitsz)
1972 AKMALLOC_ASSERT(splitsz >=
sizeof(ak_free_list_node));
/* The split remainder also needs room for its own header. */
1976 splitsz +=
sizeof(ak_alloc_node);
1979 ak_circ_list_for_each(ak_free_list_node, node, root) {
/* The free-list node lives in the chunk payload; header precedes it. */
1980 ak_alloc_node* n = ((ak_alloc_node*)(node)) - 1;
1981 AKMALLOC_ASSERT(ak_ca_is_free(n->currinfo));
1982 ak_sz nodesz = ak_ca_to_sz(n->currinfo);
1984 if ((nodesz - sz) > splitsz) {
/* Split: carve newnode out of the tail of this chunk. */
1986 ak_alloc_node* newnode = ak_ptr_cast(ak_alloc_node, (((
char*)node) + sz));
1987 int islast = ak_ca_is_last(n->currinfo);
/* Front part becomes the allocation. */
1989 ak_ca_set_sz(ak_as_ptr(n->currinfo), sz);
1990 ak_ca_set_is_last(ak_as_ptr(n->currinfo), 0);
1991 ak_ca_set_is_free(ak_as_ptr(n->currinfo), 0);
1992 ak_ca_update_footer(n);
/* Remainder keeps the original "last" flag and stays free. */
1994 ak_ca_set_sz(ak_as_ptr(newnode->currinfo), nodesz - sz -
sizeof(ak_alloc_node));
1995 ak_ca_set_is_first(ak_as_ptr(newnode->currinfo), 0);
1996 ak_ca_set_is_last(ak_as_ptr(newnode->currinfo), islast);
1997 ak_ca_set_is_free(ak_as_ptr(newnode->currinfo), 1);
1998 ak_ca_update_footer(newnode);
/* Swap the remainder into the free list in place of "node". */
2001 ak_free_list_node* fl = (ak_free_list_node*)(newnode + 1);
2002 ak_free_list_node_link(fl, node->fd, node->bk);
2003 AKMALLOC_ASSERT(n->currinfo == newnode->previnfo);
/* Fits without a worthwhile split: take the whole chunk. */
2006 ak_ca_set_is_free(ak_as_ptr(n->currinfo), 0);
2007 ak_ca_update_footer(n);
2008 ak_free_list_node_unlink(node);
/* Register a freshly obtained memory range [mem, mem+sz) with the root:
 * the segment descriptor is placed at the end of the range, the rest
 * becomes one big free chunk linked onto the free list. Returns nonzero
 * on success (return statements missing from this excerpt). */
2016 static int ak_ca_add_new_segment(ak_ca_root* root,
char* mem, ak_sz sz)
2018 if (ak_likely(mem)) {
/* Descriptor lives in the tail of the segment itself. */
2020 ak_ca_segment* seg = ak_ptr_cast(ak_ca_segment, (mem + sz -
sizeof(ak_ca_segment)));
2021 ak_ca_segment_link(seg, root->main_root.fd, ak_as_ptr(root->main_root));
2023 seg->head = ak_ptr_cast(ak_alloc_node, mem);
2025 ak_alloc_node* hd = seg->head;
/* Usable bytes = segment minus one chunk header and the descriptor. */
2026 ak_sz actualsize = (sz -
sizeof(ak_alloc_node) -
sizeof(ak_ca_segment));
/* First chunk has no predecessor; previnfo doubles as the full size so an
 * all-free segment can be detected (see segment-release logic in free). */
2028 hd->previnfo = actualsize;
2029 ak_ca_set_is_first(ak_as_ptr(hd->currinfo), 1);
2030 ak_ca_set_is_last(ak_as_ptr(hd->currinfo), 1);
2031 ak_ca_set_is_free(ak_as_ptr(hd->currinfo), 1);
2032 ak_ca_set_sz(ak_as_ptr(hd->currinfo), actualsize);
2033 ak_free_list_node* fl = (ak_free_list_node*)(hd + 1);
2034 ak_free_list_node_link(fl, root->free_root.fd, ak_as_ptr(root->free_root));
/* Obtain a segment big enough for sz payload bytes: first try to reuse a
 * cached empty segment, else fall back to the OS allocator.
 * NOTE(review): the declaration of "segsz" (used in the final call) and the
 * loop-exit lines are missing from this excerpt. */
2041 static int ak_ca_get_new_segment(ak_ca_root* root, ak_sz sz)
/* Account for descriptor, chunk header, and minimum free-list payload. */
2044 sz +=
sizeof(ak_ca_segment) +
sizeof(ak_alloc_node) +
sizeof(ak_free_list_node);
2045 sz = ak_ca_aligned_segment_size(sz);
2048 char* mem = AK_NULLPTR;
/* Prefer recycling a cached empty segment of sufficient size. */
2050 ak_circ_list_for_each(ak_ca_segment, seg, ak_as_ptr(root->empty_root)) {
2051 if (seg->sz >= sz) {
2052 mem = (
char*)(seg->head);
2054 ak_ca_segment_unlink(seg);
2060 return ak_ca_add_new_segment(root, mem ? mem : ((
char*)ak_os_alloc(sz)), segsz);
/* Walk the circular segment list rooted at r, unlink up to num segments and
 * return their memory to the OS; returns the count actually freed.
 * NOTE(review): the counter handling and return are missing from this
 * excerpt. "next" must be advanced before unlinking, since unlink nulls the
 * links -- the advance line is among the missing ones. */
2063 static ak_u32 ak_ca_return_os_mem(ak_ca_segment* r, ak_u32 num)
2066 ak_ca_segment* next = r->fd;
2067 ak_ca_segment* curr = next;
2068 for(; curr != r; curr = next) {
2073 ak_ca_segment_unlink(curr);
2074 ak_os_free(curr->head, curr->sz);
/* Body of ak_ca_init_root (header outside this excerpt): validate segment
 * size invariants, make every list a self-linked circular sentinel, zero the
 * counters, and record the release tuning knobs. */
2092 AKMALLOC_ASSERT_ALWAYS(AK_COALESCE_SEGMENT_SIZE % AK_COALESCE_SEGMENT_GRANULARITY == 0);
2093 AKMALLOC_ASSERT_ALWAYS(((AK_COALESCE_SEGMENT_SIZE & (AK_COALESCE_SEGMENT_SIZE - 1)) == 0) &&
"Segment size must be a power of 2");
/* Self-link the sentinels so empty lists terminate iteration immediately. */
2095 ak_ca_segment_link(&(root->main_root), &(root->main_root), &(root->main_root));
2096 ak_ca_segment_link(&(root->empty_root), &(root->empty_root), &(root->empty_root));
2097 ak_free_list_node_link(&(root->free_root), &(root->free_root), &(root->free_root));
2098 root->nempty = root->release = 0;
2100 root->RELEASE_RATE = relrate;
2101 root->MAX_SEGMENTS_TO_FREE = maxsegstofree;
2103 AK_CA_LOCK_INIT(root);
/* Fragment of ak_ca_init_root_default: default release rate is smaller on
 * 32-bit builds (less address space to hoard). */
2112 #if AKMALLOC_BITNESS == 32
2113 static const ak_u32 rate = 255;
2115 static const ak_u32 rate = 2047;
/* Body fragment of ak_ca_realloc_in_place: grow an allocation in place by
 * absorbing an immediately-following free chunk when the combined size
 * covers newsz. Returns the original pointer on success, else AK_NULLPTR
 * (surrounding lines missing from this excerpt). */
2130 void* retmem = AK_NULLPTR;
2132 ak_alloc_node* n = ak_ptr_cast(ak_alloc_node, mem) - 1;
/* NOTE(review): this assert reads as "is free" for a live allocation being
 * reallocated -- original line may differ; confirm against the full file. */
2133 AKMALLOC_ASSERT(ak_ca_is_free(n->currinfo));
2135 ak_sz sz = ak_ca_to_sz(n->currinfo);
2137 ak_alloc_node* next = ak_ca_next_node(n);
2138 if (next && ak_ca_is_free(next->currinfo)) {
2139 AKMALLOC_ASSERT(n->currinfo == next->previnfo);
2140 ak_sz nextsz = ak_ca_to_sz(next->currinfo);
/* Merging also reclaims next's header. */
2141 ak_sz totalsz = nextsz + sz +
sizeof(ak_alloc_node);
2142 if (totalsz >= newsz) {
2143 AK_CA_LOCK_ACQUIRE(root);
/* Take next off the free list, then extend this chunk over it. */
2150 ak_free_list_node_unlink((ak_free_list_node*)(next + 1));
2152 if (ak_ca_is_last(next->currinfo)) {
2153 ak_ca_set_is_last(ak_as_ptr(n->currinfo), 1);
2155 ak_ca_set_sz(ak_as_ptr(n->currinfo), totalsz);
2156 ak_ca_update_footer(n);
2160 AK_CA_LOCK_RELEASE(root);
/* Body fragment of ak_ca_alloc(root, s): align the request, search the free
 * list, and on miss grow by one segment and retry. The return of mem is
 * outside this excerpt. */
2177 ak_sz sz = ak_ca_aligned_size(s);
2178 AK_CA_LOCK_ACQUIRE(root);
2180 ak_sz splitsz = root->MIN_SIZE_TO_SPLIT;
2181 void* mem = ak_ca_search_free_list(ak_as_ptr(root->free_root), sz, splitsz);
2183 if (ak_unlikely(!mem)) {
/* Miss: add a segment; the retry must then succeed (asserted). */
2185 if (ak_likely(ak_ca_get_new_segment(root, sz))) {
2186 mem = ak_ca_search_free_list(ak_as_ptr(root->free_root), sz, splitsz);
2187 AKMALLOC_ASSERT(mem);
2190 AK_CA_LOCK_RELEASE(root);
/* Body fragment of ak_ca_free(root, m): mark the chunk free, coalesce with
 * free neighbours on both sides, relink the free list, and when a segment
 * becomes entirely free move it to the empty list (periodically returning
 * empty segments to the OS). Several branch/brace lines are missing from
 * this excerpt. */
2202 ak_alloc_node* node = ((ak_alloc_node*)m) - 1;
2204 AK_CA_LOCK_ACQUIRE(root);
2206 ak_alloc_node* nextnode = ak_ca_next_node(node);
2207 ak_alloc_node* prevnode = ak_ca_prev_node(node);
2211 AKMALLOC_ASSERT(!ak_ca_is_free(node->currinfo));
2212 AKMALLOC_ASSERT(!nextnode || (node->currinfo == nextnode->previnfo));
2213 ak_ca_set_is_free(ak_as_ptr(node->currinfo), 1);
2214 ak_ca_update_footer(node);
/* Backward coalesce: fold this chunk into a free predecessor. */
2218 if (prevnode && ak_ca_is_free(node->previnfo)) {
2221 ak_sz newsz = ak_ca_to_sz(node->previnfo) + ak_ca_to_sz(node->currinfo) +
sizeof(ak_alloc_node);
2222 ak_ca_set_sz(ak_as_ptr(prevnode->currinfo), newsz);
2223 ak_ca_set_is_last(ak_as_ptr(prevnode->currinfo), nextnode == AK_NULLPTR);
2224 ak_ca_update_footer(prevnode);
2225 AKMALLOC_ASSERT(!nextnode || ak_ca_next_node(prevnode) == nextnode);
2226 AKMALLOC_ASSERT(!nextnode || prevnode->currinfo == nextnode->previnfo);
/* Forward coalesce: absorb a free successor into whichever node now owns
 * this range ("coalesce" flag tracks whether backward merge happened). */
2231 if (nextnode && ak_ca_is_free(nextnode->currinfo)) {
2234 ak_alloc_node* n = (coalesce) ? prevnode : node;
2235 ak_sz newsz = ak_ca_to_sz(n->currinfo) + ak_ca_to_sz(nextnode->currinfo) +
sizeof(ak_alloc_node);
2236 ak_ca_set_sz(ak_as_ptr(n->currinfo), newsz);
2237 ak_ca_set_is_last(ak_as_ptr(n->currinfo), ak_ca_is_last(nextnode->currinfo));
2238 ak_ca_update_footer(n);
2239 AKMALLOC_ASSERT(ak_ca_is_last(n->currinfo) || (n->currinfo == ak_ca_next_node(nextnode)->previnfo));
/* Free-list maintenance; which case runs depends on the (missing) branch
 * structure around the coalesce flags. */
2244 ak_alloc_node* tocheck = AK_NULLPTR;
2248 ak_free_list_node* fl = (ak_free_list_node*)(node + 1);
2249 ak_free_list_node_link(fl, root->free_root.fd, ak_as_ptr(root->free_root));
2259 ak_free_list_node* fl = (ak_free_list_node*)(node + 1);
2260 ak_free_list_node* nextfl = (ak_free_list_node*)(nextnode + 1);
/* Replace nextnode's free-list entry with the merged chunk's entry. */
2261 ak_free_list_node_link(fl, nextfl->fd, nextfl->bk);
2266 ak_free_list_node* nextfl = (ak_free_list_node*)(nextnode + 1);
2267 ak_free_list_node_unlink(nextfl);
2272 AKMALLOC_ASSERT_ALWAYS(0 &&
"Should not get here!");
/* A chunk that is both first and last means its whole segment is free:
 * retire the segment to the empty list. */
2278 if (tocheck && ak_ca_is_first(tocheck->currinfo) && ak_ca_is_last(tocheck->currinfo)) {
2280 ak_free_list_node* fl = (ak_free_list_node*)(tocheck + 1);
2281 ak_free_list_node_unlink(fl);
2283 AKMALLOC_ASSERT(tocheck->previnfo == ak_ca_to_sz(tocheck->currinfo));
/* The segment descriptor sits right after the single free chunk. */
2284 ak_ca_segment* seg = ak_ptr_cast(ak_ca_segment, ((
char*)(tocheck + 1) + tocheck->previnfo));
2285 AKMALLOC_ASSERT(tocheck->previnfo == (seg->sz -
sizeof(ak_alloc_node) -
sizeof(ak_ca_segment)));
2286 ak_ca_segment_unlink(seg);
2287 ak_ca_segment_link(seg, root->empty_root.fd, ak_as_ptr(root->empty_root));
2288 ++(root->nempty); ++(root->release);
/* Every RELEASE_RATE retirements, hand some cached segments back to OS. */
2290 if (root->release >= root->RELEASE_RATE) {
2292 ak_u32 nrem = ak_ca_return_os_mem(ak_as_ptr(root->empty_root), root->MAX_SEGMENTS_TO_FREE);
2293 root->nempty -= nrem;
2298 AK_CA_LOCK_RELEASE(root);
/* Body fragment of ak_ca_destroy: return every segment (live and cached) to
 * the OS and reset the counters. */
2307 ak_ca_return_os_mem(ak_as_ptr(root->main_root), AK_U32_MAX);
2308 ak_ca_return_os_mem(ak_as_ptr(root->empty_root), AK_U32_MAX);
2309 root->nempty = root->release = 0;
/* Freestanding byte-fill, avoiding a libc dependency.
 * NOTE(review): the loop body (presumably "mem[i] = (char)v;") and the
 * "return m;" are missing from this excerpt. */
2436 static void* ak_memset(
void* m,
int v, ak_sz sz)
2438 char* mem = (
char*)m;
2439 for (ak_sz i = 0; i != sz; ++i) {
/* Freestanding forward byte-copy; regions must not overlap.
 * NOTE(review): loop body and "return d;" are missing from this excerpt. */
2445 static void ak_memcpy(
void* d,
const void* s, ak_sz sz)
2447 char* mem = (
char*)d;
2448 const char* srcmem = (
const char*)s;
2449 for (ak_sz i = 0; i != sz; ++i) {
/* Read the word just before a user pointer; its low bits tag which
 * allocator produced the pointer (slab=10, mmap=9, coalesce: bit 3 clear). */
2461 #define ak_alloc_type_bits(p) \
2462 ((*(((const ak_sz*)(p)) - 1)) & (AK_COALESCE_ALIGN - 1))
/* Coalesce chunks never have bit 3 set in their tag word. */
2464 #define ak_alloc_type_coalesce(sz) \
2465 ((((ak_sz)sz) & ((ak_sz)8)) == 0)
2467 #define ak_alloc_type_slab(sz) \
2468 ((((ak_sz)sz) & (AK_COALESCE_ALIGN - 1)) == 10)
2470 #define ak_alloc_type_mmap(sz) \
2471 ((((ak_sz)sz) & (AK_COALESCE_ALIGN - 1)) == 9)
/* Coalesce allocations need no explicit mark; the boundary tag suffices. */
2473 #define ak_alloc_mark_coalesce(p) ((void)(p))
2475 #define ak_alloc_mark_slab(p) \
2476 *(((ak_sz*)(p)) - 1) = ((ak_sz)10)
2478 #define ak_alloc_mark_mmap(p) \
2479 *(((ak_sz*)(p)) - 1) = ((ak_sz)9)
/* Slab request padding: reserve room before the user pointer for the type
 * tag -- a full AK_COALESCE_ALIGN when 16-byte minimum alignment is
 * requested, otherwise a single ak_sz word. */
2481 #if defined(AK_MIN_SLAB_ALIGN_16)
2482 # define ak_slab_mod_sz(x) (ak_ca_aligned_size((x)) + AK_COALESCE_ALIGN)
2483 # define ak_slab_alloc_2_mem(x) (((ak_sz*)x) + (AK_COALESCE_ALIGN / sizeof(ak_sz)))
2484 # define ak_slab_mem_2_alloc(x) (((ak_sz*)x) - (AK_COALESCE_ALIGN / sizeof(ak_sz)))
2485 # define ak_slab_usable_size(x) ((x) - AK_COALESCE_ALIGN)
2487 # define ak_slab_mod_sz(x) (ak_ca_aligned_size((x) + sizeof(ak_sz)))
2488 # define ak_slab_alloc_2_mem(x) (((ak_sz*)x) + 1)
2489 # define ak_slab_mem_2_alloc(x) (((ak_sz*)x) - 1)
2490 # define ak_slab_usable_size(x) ((x) - sizeof(ak_sz))
/* Requests up to this (padded) size go to the slab allocator. */
2494 #define MIN_SMALL_REQUEST 256
/* Requests at or above MMAP_SIZE go straight to the OS. On Windows this is
 * effectively disabled (AK_SZ_MAX), presumably because VirtualAlloc
 * granularity makes per-allocation mapping unattractive -- confirm. */
2496 #if !defined(MMAP_SIZE)
2497 # if !AKMALLOC_WINDOWS
2498 # define MMAP_SIZE (AK_SZ_ONE << 20)
2504 # define MMAP_SIZE AK_SZ_MAX
/* Size-class tables (fragments): 16 slab classes in 16-byte steps, then
 * coalescing-root classes up to MMAP_SIZE. */
2515 16, 32, 48, 64, 80, 96, 112, 128,
2516 144, 160, 176, 192, 208, 224, 240, 256
2527 768, 1408, 2048, 4096, 8192, 16384, 65536, MMAP_SIZE
/* Tuning knobs for how aggressively empty coalesce segments are returned. */
2545 #if !defined(AKMALLOC_COALESCING_ALLOC_RELEASE_RATE)
2546 # define AKMALLOC_COALESCING_ALLOC_RELEASE_RATE 24
2549 #if !defined(AKMALLOC_COALESCING_ALLOC_MAX_PAGES_TO_FREE)
2550 # define AKMALLOC_COALESCING_ALLOC_MAX_PAGES_TO_FREE AKMALLOC_COALESCING_ALLOC_RELEASE_RATE
/* Last-ditch reclaim before failing an allocation: flush every slab root's
 * empty pages and every coalesce root's empty segments back to the OS.
 * (The mmap list needs no reclaim -- those mappings are freed eagerly.) */
2553 static void ak_try_reclaim_memory(ak_malloc_state* m)
2556 for (ak_sz i = 0; i < NSLABS; ++i) {
2557 ak_slab_root* s = ak_as_ptr(m->slabs[i]);
2558 ak_slab_release_pages(s, ak_as_ptr(s->empty_root), AK_U32_MAX);
2563 for (ak_sz i = 0; i < NCAROOTS; ++i) {
2564 ak_ca_root* ca = ak_as_ptr(m->ca[i]);
2565 ak_ca_return_os_mem(ak_as_ptr(ca->empty_root), AK_U32_MAX);
2574 ak_inline
/* Slab path: sz is already padded/aligned by ak_slab_mod_sz, so the class
 * index is simply sz/16 - 1. On success the tag word is written and the
 * user pointer (past the tag) is returned (return lines missing here). */
static void* ak_try_slab_alloc(ak_malloc_state* m,
size_t sz)
2577 ak_sz idx = (sz >> 4) - 1;
2578 ak_sz* mem = (ak_sz*)
ak_slab_alloc(ak_as_ptr(m->slabs[idx]));
2579 if (ak_likely(mem)) {
2580 ak_alloc_mark_slab(ak_slab_alloc_2_mem(mem));
2581 AKMALLOC_ASSERT(ak_alloc_type_slab(ak_alloc_type_bits(ak_slab_alloc_2_mem(mem))));
2582 mem = ak_slab_alloc_2_mem(mem);
2587 ak_inline
/* Coalesce path: the ak_ca_alloc call that produces "mem" is missing from
 * this excerpt; coalesce pointers carry no explicit mark. */
static void* ak_try_coalesce_alloc(ak_malloc_state* m, ak_ca_root* proot,
size_t sz)
2590 if (ak_likely(mem)) {
2591 ak_alloc_mark_coalesce(mem);
2592 AKMALLOC_ASSERT(ak_alloc_type_coalesce(ak_alloc_type_bits(mem)));
2597 ak_inline
/* mmap path: one OS allocation per request; an ak_ca_segment header is kept
 * in front of the user data and linked into m->map_root under MAP_LOCK. */
static void* ak_try_alloc_mmap(ak_malloc_state* m,
size_t sz)
2599 AKMALLOC_LOCK_ACQUIRE(ak_as_ptr(m->MAP_LOCK));
2600 ak_ca_segment* mem = (ak_ca_segment*)ak_os_alloc(sz);
2601 if (ak_likely(mem)) {
2602 ak_alloc_mark_mmap(mem + 1);
2603 AKMALLOC_ASSERT(ak_alloc_type_mmap(ak_alloc_type_bits(mem + 1)));
2605 ak_ca_segment_link(mem, m->map_root.fd, ak_as_ptr(m->map_root));
2608 AKMALLOC_LOCK_RELEASE(ak_as_ptr(m->MAP_LOCK));
2613 ak_inline
/* Pick the first coalesce root whose class size covers sz (declaration of
 * "i" and the loop break are missing from this excerpt). */
static ak_ca_root* ak_find_ca_root(ak_malloc_state* m, ak_sz sz)
2616 for (; i < NCAROOTS; ++i) {
2617 if (CA_SIZES[i] >= sz) {
2621 AKMALLOC_ASSERT(i < NCAROOTS);
2622 return ak_as_ptr(m->ca[i]);
2625 ak_inline
/* Central dispatch: small (padded <= MIN_SMALL_REQUEST) -> slab;
 * medium (< MMAP_SIZE) -> coalescing allocator; large -> direct OS map.
 * DBG_PRINTF lines trace each path when debug printing is enabled. */
static void* ak_try_alloc(ak_malloc_state* m,
size_t sz)
2627 void* retmem = AK_NULLPTR;
2628 ak_sz modsz = ak_slab_mod_sz(sz);
2629 if (modsz <= MIN_SMALL_REQUEST) {
2630 retmem = ak_try_slab_alloc(m, modsz);
2631 DBG_PRINTF(
"a,slab,%p,%llu\n", retmem, modsz);
2632 }
else if (sz < MMAP_SIZE) {
2633 const ak_sz alnsz = ak_ca_aligned_size(sz);
2634 ak_ca_root* proot = ak_find_ca_root(m, sz);
2635 retmem = ak_try_coalesce_alloc(m, proot, alnsz);
2636 DBG_PRINTF(
"a,ca[%d],%p,%llu\n", (
int)(proot-ak_as_ptr(m->ca[0])), retmem, alnsz);
/* mmap path: reserve room for the leading segment header. */
2638 sz +=
sizeof(ak_ca_segment);
2639 const ak_sz actsz = ak_ca_aligned_segment_size(sz);
2640 retmem = ak_try_alloc_mmap(m, actsz);
2641 DBG_PRINTF(
"a,mmap,%p,%llu\n", retmem, actsz);
/* Over-allocate from a coalesce root, then if the returned block is not
 * already aln-aligned, split it at the first aligned position: the front
 * part is freed back, the aligned tail becomes the allocation.
 * NOTE(review): the line performing the actual allocation into "mem" and
 * the return/cleanup lines are missing from this excerpt. */
2648 static void* ak_aligned_alloc_from_state_no_checks(ak_malloc_state* m,
size_t aln,
size_t sz)
2650 ak_sz req = ak_ca_aligned_size(sz);
/* Headroom for the alignment shift plus the split's extra headers. */
2651 req += aln + (2 *
sizeof(ak_alloc_node)) + (2 *
sizeof(ak_free_list_node));
2652 char* mem = AK_NULLPTR;
2654 ak_ca_root* ca = ak_find_ca_root(m, req);
2657 ak_alloc_node* node = ak_ptr_cast(ak_alloc_node, mem) - 1;
2658 if (ak_likely(mem)) {
2659 if ((((ak_sz)mem) & (aln - 1)) != 0) {
2661 AK_CA_LOCK_ACQUIRE(ca);
/* First aln-aligned address within the block. */
2662 char* alnpos = (
char*)(((ak_sz)(mem + aln - 1)) & ~(aln - 1));
2663 ak_alloc_node* alnnode = ak_ptr_cast(ak_alloc_node, alnpos) - 1;
2664 AKMALLOC_ASSERT((ak_sz)(alnpos - mem) >= (
sizeof(ak_free_list_node) +
sizeof(ak_alloc_node)));
2665 ak_sz actsz = ak_ca_to_sz(node->currinfo);
/* NOTE(review): ak_ca_is_last is normally applied to a tag word, not a
 * node pointer -- "node" here may be a truncation of "node->currinfo";
 * confirm against the full file. */
2666 int islast = ak_ca_is_last(node);
/* Front remainder: shrunk, marked free, no longer last. */
2668 ak_ca_set_sz(ak_as_ptr(node->currinfo), alnpos - mem -
sizeof(ak_alloc_node));
2669 ak_ca_set_is_last(ak_as_ptr(node->currinfo), 0);
2670 ak_ca_set_is_free(ak_as_ptr(node->currinfo), 1);
/* Aligned tail: the rest of the original chunk, allocated. */
2672 ak_ca_set_sz(ak_as_ptr(alnnode->currinfo), actsz - ak_ca_to_sz(node->currinfo));
2673 ak_ca_set_is_last(ak_as_ptr(alnnode->currinfo), islast);
2674 ak_ca_set_is_free(ak_as_ptr(alnnode->currinfo), 0);
2676 ak_ca_update_footer(node);
2677 ak_ca_update_footer(alnnode);
2679 AK_CA_LOCK_RELEASE(ca);
/* Fragment of ak_malloc_init_state: initialize each slab root (slab init
 * line missing here), each coalesce root with the configured release
 * tuning, self-link the mmap segment list, and set up the map lock. */
2700 for (ak_sz i = 0; i != NSLABS; ++i) {
2704 for (ak_sz i = 0; i != NCAROOTS; ++i) {
2705 ak_ca_init_root(ak_as_ptr(s->ca[i]), AKMALLOC_COALESCING_ALLOC_RELEASE_RATE, AKMALLOC_COALESCING_ALLOC_MAX_PAGES_TO_FREE);
2708 ak_ca_segment_link(ak_as_ptr(s->map_root), ak_as_ptr(s->map_root), ak_as_ptr(s->map_root));
2710 AKMALLOC_LOCK_INIT(ak_as_ptr(s->MAP_LOCK));
/* Fragment of ak_malloc_destroy_state: tear down every slab and coalesce
 * root (destroy calls missing here), then unmap each mmap segment. */
2720 for (ak_sz i = 0; i < NSLABS; ++i) {
2723 for (ak_sz i = 0; i < NCAROOTS; ++i) {
2728 ak_circ_list_for_each(ak_ca_segment, seg, &(m->map_root)) {
2730 ak_os_free(seg, seg->sz);
/* Fragment of ak_malloc_from_state: on first failure, reclaim cached memory
 * and retry once before giving up. */
2745 AKMALLOC_ASSERT(m->init);
2746 void* mem = ak_try_alloc(m, sz);
2747 if (ak_unlikely(!mem)) {
2748 ak_try_reclaim_memory(m);
2749 mem = ak_try_alloc(m, sz);
/* Body fragment of ak_free_to_state: read the type tag behind the pointer
 * and dispatch -- slab free (call missing here), mmap unlink+unmap, or
 * coalesce free (call missing here). free(NULL) is a no-op via the outer
 * ak_likely(mem) guard. */
2761 if (ak_likely(mem)) {
2762 #if defined(AKMALLOC_DEBUG_PRINT)
2765 ak_sz ty = ak_alloc_type_bits(mem);
2766 if (ak_alloc_type_slab(ty)) {
2767 DBG_PRINTF(
"d,slab,%p,%llu\n", mem, ussize);
2769 }
else if (ak_alloc_type_mmap(ty)) {
2770 DBG_PRINTF(
"d,mmap,%p,%llu\n", mem, ussize);
/* mmap: the segment header precedes the user pointer; unlink under the
 * map lock, then return the whole mapping to the OS. */
2771 AKMALLOC_LOCK_ACQUIRE(ak_as_ptr(m->MAP_LOCK));
2772 ak_ca_segment* seg = ((ak_ca_segment*)mem) - 1;
2773 ak_ca_segment_unlink(seg);
2774 ak_os_free(seg, seg->sz);
2775 AKMALLOC_LOCK_RELEASE(ak_as_ptr(m->MAP_LOCK));
2777 AKMALLOC_ASSERT(ak_alloc_type_coalesce(ty));
2778 const ak_alloc_node* n = ((
const ak_alloc_node*)mem) - 1;
2779 const ak_sz alnsz = ak_ca_to_sz(n->currinfo);
/* Coalesce frees must go back to the root that owns this size class. */
2780 ak_ca_root* proot = ak_find_ca_root(m, alnsz);
2781 DBG_PRINTF(
"d,ca[%d],%p,%llu\n", (
int)(proot-ak_as_ptr(m->ca[0])), mem, alnsz);
/* Fragment of ak_realloc_in_place_from_state: shrink-in-place is trivially
 * satisfied; growth is only attempted for coalesce allocations via
 * ak_ca_realloc_in_place (call missing from this excerpt).
 * NOTE(review): as in ak_ca_realloc_in_place, the "is free" assert on a
 * live chunk looks inverted/truncated -- confirm against the full file. */
2798 if (usablesize >= newsz) {
2801 if (ak_alloc_type_coalesce(ak_alloc_type_bits(mem))) {
2802 ak_alloc_node* n = ak_ptr_cast(ak_alloc_node, mem) - 1;
2803 AKMALLOC_ASSERT(ak_ca_is_free(n->currinfo));
2805 ak_sz sz = ak_ca_to_sz(n->currinfo);
2806 ak_ca_root* proot = ak_find_ca_root(m, sz);
/* Fragment of ak_realloc_from_state: allocate-copy-free fallback (the
 * copy and free lines are missing from this excerpt). */
2828 if (ak_unlikely(!mem)) {
2840 if (ak_likely(mem)) {
/* Fragment of ak_malloc_usable_size_in_state: usable size by type tag --
 * slab: class size minus tag overhead; mmap: mapping minus header;
 * coalesce: the boundary-tag size. */
2856 if (ak_likely(mem)) {
2857 ak_sz ty = ak_alloc_type_bits(mem);
2858 if (ak_alloc_type_slab(ty)) {
/* Slab class size is recorded in the page's root. */
2860 const ak_slab* slab = (
const ak_slab*)(ak_page_start_before_const(mem));
2861 return ak_slab_usable_size(slab->root->sz);
2862 }
else if (ak_alloc_type_mmap(ty)) {
2863 return (((
const ak_ca_segment*)mem) - 1)->sz -
sizeof(ak_ca_segment);
2865 AKMALLOC_ASSERT(ak_alloc_type_coalesce(ty));
2866 const ak_alloc_node* n = ((
const ak_alloc_node*)mem) - 1;
2867 AKMALLOC_ASSERT(!ak_ca_is_free(n->currinfo));
2868 return ak_ca_to_sz(n->currinfo);
/* Fragment of ak_aligned_alloc_from_state: reject alignments that are odd
 * or not a power of two before delegating. */
2889 if ((aln & AK_SZ_ONE) || (aln & (aln - 1))) {
2896 return ak_aligned_alloc_from_state_no_checks(m, aln, sz);
/* POSIX errno values returned by ak_posix_memalign (EINVAL/ENOMEM),
 * defined locally to avoid including <errno.h>. */
2899 #define AK_EINVAL 22
2900 #define AK_ENOMEM 12
/* Fragment of ak_posix_memalign_from_state: per POSIX, aln must be a
 * power-of-two multiple of sizeof(void*); reject otherwise (EINVAL return
 * missing from this excerpt).
 * NOTE(review): "aln & (sizeof(ak_sz))" does not test divisibility -- the
 * conventional mask is "aln & (sizeof(ak_sz) - 1)". The "- 1" may have been
 * lost in extraction, or this is a genuine bug; confirm against the full
 * file before relying on the validation. */
2916 void* mem = AK_NULLPTR;
2920 ak_sz div = (aln /
sizeof(ak_sz));
2921 ak_sz rem = (aln & (
sizeof(ak_sz)));
2922 if (rem != 0 || div == 0 || (div & (div - AK_SZ_ONE)) != 0) {
2926 mem = ak_aligned_alloc_from_state_no_checks(m, aln, sz);
/* Fragment of ak_malloc_for_each_segment_in_state: invoke cbk(ptr, size)
 * for every slab page (full and partial lists), every coalesce segment,
 * and every mmap segment; a zero return from cbk stops the walk (the early
 * return lines are missing from this excerpt). */
2945 for (ak_sz i = 0; i < NSLABS; ++i) {
2946 ak_slab_root* s = ak_as_ptr(m->slabs[i]);
2947 ak_circ_list_for_each(ak_slab, fslab, &(s->full_root)) {
2948 if (!cbk(fslab, AKMALLOC_DEFAULT_PAGE_SIZE)) {
2952 ak_circ_list_for_each(ak_slab, pslab, &(s->partial_root)) {
2953 if (!cbk(pslab, AKMALLOC_DEFAULT_PAGE_SIZE)) {
2960 for (ak_sz i = 0; i < NCAROOTS; ++i) {
2961 ak_circ_list_for_each(ak_ca_segment, seg, &(m->ca[i].main_root)) {
2962 if (!cbk(seg->head, seg->sz)) {
2970 ak_circ_list_for_each(ak_ca_segment, seg, &(m->map_root)) {
2971 if (!cbk(seg, seg->sz)) {
/* Global singleton state for the malloc entry points, lazily initialized. */
2983 static int MALLOC_INIT = 0;
2985 static ak_malloc_state MALLOC_ROOT;
2986 static ak_malloc_state* GMSTATE = AK_NULLPTR;
2987 static ak_spinlock MALLOC_INIT_LOCK = { 0 };
/* Lazy one-time init guard. NOTE(review): this looks like double-checked
 * locking on a plain int -- the unsynchronized first read of MALLOC_INIT is
 * a data race under the C11 memory model unless callers guarantee
 * otherwise; the line setting MALLOC_INIT = 1 is missing from this
 * excerpt. Confirm against the full file. */
2989 #define ak_ensure_malloc_state_init() \
2991 if (ak_unlikely(!MALLOC_INIT)) { \
2992 AKMALLOC_LOCK_ACQUIRE(ak_as_ptr(MALLOC_INIT_LOCK)); \
2993 if (MALLOC_INIT != 1) { \
2994 GMSTATE = &MALLOC_ROOT; \
2995 ak_malloc_init_state(GMSTATE); \
2998 AKMALLOC_LOCK_RELEASE(ak_as_ptr(MALLOC_INIT_LOCK)); \
3000 AKMALLOC_ASSERT(MALLOC_ROOT.init); \
/* Fragments of the public entry points (ak_malloc, ak_calloc, ak_free,
 * ak_realloc, ak_memalign, ak_aligned_alloc, ak_posix_memalign,
 * ak_realloc_in_place, ...): each ensures the global state is initialized,
 * then delegates to the *_from_state implementation (delegation lines are
 * largely missing from this excerpt). */
3007 ak_ensure_malloc_state_init();
/* ak_calloc fragment: allocate elsz*numel bytes and zero them.
 * NOTE(review): elsz*numel can overflow ak_sz with no visible check here,
 * unlike standard calloc -- confirm whether the full file validates it. */
3013 const ak_sz sz = elsz*numel;
3015 return ak_memset(mem, 0, sz);
3020 ak_ensure_malloc_state_init();
3026 ak_ensure_malloc_state_init();
3032 ak_ensure_malloc_state_init();
3038 ak_ensure_malloc_state_init();
3049 ak_ensure_malloc_state_init();
3053 void* ak_realloc_in_place(
void* mem,
size_t newsz)
3055 ak_ensure_malloc_state_init();
3061 ak_ensure_malloc_state_init();
ak_free_list_node free_root
void * ak_memalign(size_t sz, size_t aln)
static void * ak_realloc_in_place_from_state(ak_malloc_state *m, void *mem, size_t newsz)
static const ak_sz SLAB_SIZES[16]
static void * ak_ca_alloc(ak_ca_root *root, ak_sz s)
size_t ak_malloc_usable_size(const void *mem)
static void * ak_aligned_alloc_from_state(ak_malloc_state *m, size_t aln, size_t sz)
static void * ak_realloc_from_state(ak_malloc_state *m, void *mem, size_t newsz)
ak_u32 MAX_SEGMENTS_TO_FREE
void * ak_calloc(size_t elsz, size_t numel)
static void ak_free_to_state(ak_malloc_state *m, void *mem)
void * ak_malloc(size_t sz)
static void ak_malloc_for_each_segment_in_state(ak_malloc_state *m, ak_seg_cbk cbk)
static void * ak_ca_realloc_in_place(ak_ca_root *root, void *mem, ak_sz newsz)
static void ak_malloc_init_state(ak_malloc_state *s)
int ak_posix_memalign(void **pmem, size_t aln, size_t sz)
static void ak_slab_init_root(ak_slab_root *s, ak_sz sz, ak_u32 npages, ak_u32 relrate, ak_u32 maxpagefree)
static void * ak_malloc_from_state(ak_malloc_state *m, size_t sz)
#define AK_COALESCE_ALIGN
static void * ak_slab_alloc(ak_slab_root *root)
static void ak_ca_init_root(ak_ca_root *root, ak_u32 relrate, ak_u32 maxsegstofree)
static void ak_ca_init_root_default(ak_ca_root *root)
int(* ak_seg_cbk)(const void *, size_t)
static void ak_slab_init_root_default(ak_slab_root *s, ak_sz sz)
void * ak_realloc(void *mem, size_t newsz)
static int ak_posix_memalign_from_state(ak_malloc_state *m, void **pmem, size_t aln, size_t sz)
static void ak_ca_free(ak_ca_root *root, void *m)
static void ak_ca_destroy(ak_ca_root *root)
static void ak_slab_free(void *p)
static const ak_sz CA_SIZES[8]
static void ak_malloc_destroy_state(ak_malloc_state *m)
static size_t ak_malloc_usable_size_in_state(const void *mem)
void ak_malloc_for_each_segment(ak_seg_cbk cbk)
static void ak_slab_destroy(ak_slab_root *root)
void * ak_aligned_alloc(size_t sz, size_t aln)