batch_alloc.c 4.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190
  1. #include "test/jemalloc_test.h"
  2. #define BATCH_MAX ((1U << 16) + 1024)
  3. static void *global_ptrs[BATCH_MAX];
  4. #define PAGE_ALIGNED(ptr) (((uintptr_t)ptr & PAGE_MASK) == 0)
  5. static void
  6. verify_batch_basic(tsd_t *tsd, void **ptrs, size_t batch, size_t usize,
  7. bool zero) {
  8. for (size_t i = 0; i < batch; ++i) {
  9. void *p = ptrs[i];
  10. expect_zu_eq(isalloc(tsd_tsdn(tsd), p), usize, "");
  11. if (zero) {
  12. for (size_t k = 0; k < usize; ++k) {
  13. expect_true(*((unsigned char *)p + k) == 0, "");
  14. }
  15. }
  16. }
  17. }
  18. static void
  19. verify_batch_locality(tsd_t *tsd, void **ptrs, size_t batch, size_t usize,
  20. arena_t *arena, unsigned nregs) {
  21. if (config_prof && opt_prof) {
  22. /*
  23. * Checking batch locality when prof is on is feasible but
  24. * complicated, while checking the non-prof case suffices for
  25. * unit-test purpose.
  26. */
  27. return;
  28. }
  29. for (size_t i = 0, j = 0; i < batch; ++i, ++j) {
  30. if (j == nregs) {
  31. j = 0;
  32. }
  33. if (j == 0 && batch - i < nregs) {
  34. break;
  35. }
  36. void *p = ptrs[i];
  37. expect_ptr_eq(iaalloc(tsd_tsdn(tsd), p), arena, "");
  38. if (j == 0) {
  39. expect_true(PAGE_ALIGNED(p), "");
  40. continue;
  41. }
  42. assert(i > 0);
  43. void *q = ptrs[i - 1];
  44. expect_true((uintptr_t)p > (uintptr_t)q
  45. && (size_t)((uintptr_t)p - (uintptr_t)q) == usize, "");
  46. }
  47. }
  48. static void
  49. release_batch(void **ptrs, size_t batch, size_t size) {
  50. for (size_t i = 0; i < batch; ++i) {
  51. sdallocx(ptrs[i], size, 0);
  52. }
  53. }
/*
 * Input packet for the "experimental.batch_alloc" mallctl.  Field order and
 * types mirror the allocator-side definition; do not reorder.
 */
typedef struct batch_alloc_packet_s batch_alloc_packet_t;
struct batch_alloc_packet_s {
	void **ptrs;	/* out: receives the allocated pointers */
	size_t num;	/* number of allocations requested */
	size_t size;	/* requested size of each allocation */
	int flags;	/* MALLOCX_* flags applied to each allocation */
};
  61. static size_t
  62. batch_alloc_wrapper(void **ptrs, size_t num, size_t size, int flags) {
  63. batch_alloc_packet_t batch_alloc_packet = {ptrs, num, size, flags};
  64. size_t filled;
  65. size_t len = sizeof(size_t);
  66. assert_d_eq(mallctl("experimental.batch_alloc", &filled, &len,
  67. &batch_alloc_packet, sizeof(batch_alloc_packet)), 0, "");
  68. return filled;
  69. }
/*
 * Core test driver: batch-allocate with the given size/alignment/zero/arena
 * configuration at a spread of batch sizes, then verify per-pointer basics
 * and slab locality before releasing everything.
 *
 * size       requested allocation size.
 * alignment  0 for natural alignment, otherwise a MALLOCX_ALIGN value.
 * zero       whether to request zeroed memory (MALLOCX_ZERO).
 * arena_flag 0 for the default arena, otherwise a MALLOCX_ARENA(ind) flag.
 */
static void
test_wrapper(size_t size, size_t alignment, bool zero, unsigned arena_flag) {
	tsd_t *tsd = tsd_fetch();
	assert(tsd != NULL);
	/* Usable size implied by the size/alignment pair. */
	const size_t usize =
	    (alignment != 0 ? sz_sa2u(size, alignment) : sz_s2u(size));
	const szind_t ind = sz_size2index(usize);
	const bin_info_t *bin_info = &bin_infos[ind];
	/* Regions per slab for this size class; drives locality checks. */
	const unsigned nregs = bin_info->nregs;
	assert(nregs > 0);
	arena_t *arena;
	if (arena_flag != 0) {
		arena = arena_get(tsd_tsdn(tsd), MALLOCX_ARENA_GET(arena_flag),
		    false);
	} else {
		arena = arena_choose(tsd, NULL);
	}
	assert(arena != NULL);
	int flags = arena_flag;
	if (alignment != 0) {
		flags |= MALLOCX_ALIGN(alignment);
	}
	if (zero) {
		flags |= MALLOCX_ZERO;
	}
	/*
	 * Allocate for the purpose of bootstrapping arena_tdata, so that the
	 * change in bin stats won't contaminate the stats to be verified below.
	 */
	void *p = mallocx(size, flags | MALLOCX_TCACHE_NONE);
	/*
	 * Batch sizes exercised: one below / at / above each of 0, nregs,
	 * 2 * nregs, and 1 << 16, to hit slab boundaries and a large batch.
	 */
	for (size_t i = 0; i < 4; ++i) {
		size_t base = 0;
		if (i == 1) {
			base = nregs;
		} else if (i == 2) {
			base = nregs * 2;
		} else if (i == 3) {
			base = (1 << 16);
		}
		for (int j = -1; j <= 1; ++j) {
			/* base - 1 underflows when base == 0; skip it. */
			if (base == 0 && j == -1) {
				continue;
			}
			size_t batch = base + (size_t)j;
			assert(batch < BATCH_MAX);
			size_t filled = batch_alloc_wrapper(global_ptrs, batch,
			    size, flags);
			assert_zu_eq(filled, batch, "");
			verify_batch_basic(tsd, global_ptrs, batch, usize,
			    zero);
			verify_batch_locality(tsd, global_ptrs, batch, usize,
			    arena, nregs);
			release_batch(global_ptrs, batch, usize);
		}
	}
	free(p);
}
/* Baseline: small size, default arena, no alignment, no zeroing. */
TEST_BEGIN(test_batch_alloc) {
	test_wrapper(11, 0, false, 0);
}
TEST_END
/* Same as the baseline but with MALLOCX_ZERO requested. */
TEST_BEGIN(test_batch_alloc_zero) {
	test_wrapper(11, 0, true, 0);
}
TEST_END
/* Aligned variant: 16-byte alignment requested via MALLOCX_ALIGN. */
TEST_BEGIN(test_batch_alloc_aligned) {
	test_wrapper(7, 16, false, 0);
}
TEST_END
/* Explicit-arena variant: create a fresh arena and allocate from it. */
TEST_BEGIN(test_batch_alloc_manual_arena) {
	unsigned arena_ind;
	size_t len_unsigned = sizeof(unsigned);
	assert_d_eq(mallctl("arenas.create", &arena_ind, &len_unsigned, NULL,
	    0), 0, "");
	test_wrapper(11, 0, false, MALLOCX_ARENA(arena_ind));
}
TEST_END
  147. TEST_BEGIN(test_batch_alloc_large) {
  148. size_t size = SC_LARGE_MINCLASS;
  149. for (size_t batch = 0; batch < 4; ++batch) {
  150. assert(batch < BATCH_MAX);
  151. size_t filled = batch_alloc(global_ptrs, batch, size, 0);
  152. assert_zu_eq(filled, batch, "");
  153. release_batch(global_ptrs, batch, size);
  154. }
  155. size = tcache_maxclass + 1;
  156. for (size_t batch = 0; batch < 4; ++batch) {
  157. assert(batch < BATCH_MAX);
  158. size_t filled = batch_alloc(global_ptrs, batch, size, 0);
  159. assert_zu_eq(filled, batch, "");
  160. release_batch(global_ptrs, batch, size);
  161. }
  162. }
  163. TEST_END
/* Run every batch_alloc test through the jemalloc test harness. */
int
main(void) {
	return test(
	    test_batch_alloc,
	    test_batch_alloc_zero,
	    test_batch_alloc_aligned,
	    test_batch_alloc_manual_arena,
	    test_batch_alloc_large);
}