mallctl.c

#include "test/jemalloc_test.h"

#include "jemalloc/internal/ctl.h"
#include "jemalloc/internal/hook.h"
#include "jemalloc/internal/util.h"

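/*
 * Sanity checks for the documented error returns of mallctl(): ENOENT for
 * unknown names, EPERM for writes to read-only nodes, and EINVAL for
 * mismatched input/output sizes.
 */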
TEST_BEGIN(test_mallctl_errors) {
    uint64_t epoch;
    size_t sz;
    expect_d_eq(mallctl("no_such_name", NULL, NULL, NULL, 0), ENOENT,
        "mallctl() should return ENOENT for non-existent names");
    expect_d_eq(mallctl("version", NULL, NULL, "0.0.0", strlen("0.0.0")),
        EPERM, "mallctl() should return EPERM on attempt to write "
        "read-only value");
    expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,
        sizeof(epoch)-1), EINVAL,
        "mallctl() should return EINVAL for input size mismatch");
    expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&epoch,
        sizeof(epoch)+1), EINVAL,
        "mallctl() should return EINVAL for input size mismatch");
    sz = sizeof(epoch)-1;
    expect_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,
        "mallctl() should return EINVAL for output size mismatch");
    sz = sizeof(epoch)+1;
    expect_d_eq(mallctl("epoch", (void *)&epoch, &sz, NULL, 0), EINVAL,
        "mallctl() should return EINVAL for output size mismatch");
}
TEST_END

TEST_BEGIN(test_mallctlnametomib_errors) {
    size_t mib[1];
    size_t miblen;
    miblen = sizeof(mib)/sizeof(size_t);
    expect_d_eq(mallctlnametomib("no_such_name", mib, &miblen), ENOENT,
        "mallctlnametomib() should return ENOENT for non-existent names");
}
TEST_END

TEST_BEGIN(test_mallctlbymib_errors) {
    uint64_t epoch;
    size_t sz;
    size_t mib[1];
    size_t miblen;
    miblen = sizeof(mib)/sizeof(size_t);
    expect_d_eq(mallctlnametomib("version", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, "0.0.0",
        strlen("0.0.0")), EPERM, "mallctlbymib() should return EPERM on "
        "attempt to write read-only value");
    miblen = sizeof(mib)/sizeof(size_t);
    expect_d_eq(mallctlnametomib("epoch", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, (void *)&epoch,
        sizeof(epoch)-1), EINVAL,
        "mallctlbymib() should return EINVAL for input size mismatch");
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, (void *)&epoch,
        sizeof(epoch)+1), EINVAL,
        "mallctlbymib() should return EINVAL for input size mismatch");
    sz = sizeof(epoch)-1;
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&epoch, &sz, NULL, 0),
        EINVAL,
        "mallctlbymib() should return EINVAL for output size mismatch");
    sz = sizeof(epoch)+1;
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&epoch, &sz, NULL, 0),
        EINVAL,
        "mallctlbymib() should return EINVAL for output size mismatch");
}
TEST_END

TEST_BEGIN(test_mallctl_read_write) {
    uint64_t old_epoch, new_epoch;
    size_t sz = sizeof(old_epoch);
    /* Blind. */
    expect_d_eq(mallctl("epoch", NULL, NULL, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");
    /* Read. */
    expect_d_eq(mallctl("epoch", (void *)&old_epoch, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");
    /* Write. */
    expect_d_eq(mallctl("epoch", NULL, NULL, (void *)&new_epoch,
        sizeof(new_epoch)), 0, "Unexpected mallctl() failure");
    expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");
    /* Read+write. */
    expect_d_eq(mallctl("epoch", (void *)&old_epoch, &sz,
        (void *)&new_epoch, sizeof(new_epoch)), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(sz, sizeof(old_epoch), "Unexpected output size");
}
TEST_END

TEST_BEGIN(test_mallctlnametomib_short_mib) {
    size_t mib[4];
    size_t miblen;
    miblen = 3;
    mib[3] = 42;
    expect_d_eq(mallctlnametomib("arenas.bin.0.nregs", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    expect_zu_eq(miblen, 3, "Unexpected mib output length");
    expect_zu_eq(mib[3], 42,
        "mallctlnametomib() wrote past the end of the input mib");
}
TEST_END

TEST_BEGIN(test_mallctlnametomib_short_name) {
    size_t mib[4];
    size_t miblen;
    miblen = 4;
    mib[3] = 42;
    expect_d_eq(mallctlnametomib("arenas.bin.0", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    expect_zu_eq(miblen, 3, "Unexpected mib output length");
    expect_zu_eq(mib[3], 42,
        "mallctlnametomib() wrote past the end of the input mib");
}
TEST_END

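/*
 * Exercise the internal ctl_mibnametomib() helper, which extends an existing
 * mib prefix by translating additional name components; the asserted behavior
 * is that miblen is left untouched when a component cannot be resolved.
 */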
TEST_BEGIN(test_mallctlmibnametomib) {
    size_t mib[4];
    size_t miblen = 4;
    uint32_t result, result_ref;
    size_t len_result = sizeof(uint32_t);
    tsd_t *tsd = tsd_fetch();
    /* Error cases. */
    assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "bob", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "9999", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    /* Valid case. */
    assert_d_eq(ctl_mibnametomib(tsd, mib, 0, "arenas", &miblen), 0, "");
    assert_zu_eq(miblen, 1, "");
    miblen = 4;
    assert_d_eq(ctl_mibnametomib(tsd, mib, 1, "bin", &miblen), 0, "");
    assert_zu_eq(miblen, 2, "");
    expect_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
        ENOENT, "mallctlbymib() should fail on partial path");
    /* Error cases. */
    miblen = 4;
    assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "bob", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "9999", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    /* Valid case. */
    assert_d_eq(ctl_mibnametomib(tsd, mib, 2, "0", &miblen), 0, "");
    assert_zu_eq(miblen, 3, "");
    expect_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
        ENOENT, "mallctlbymib() should fail on partial path");
    /* Error cases. */
    miblen = 4;
    assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "bob", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "9999", &miblen), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    /* Valid case. */
    assert_d_eq(ctl_mibnametomib(tsd, mib, 3, "nregs", &miblen), 0, "");
    assert_zu_eq(miblen, 4, "");
    assert_d_eq(mallctlbymib(mib, miblen, &result, &len_result, NULL, 0),
        0, "Unexpected mallctlbymib() failure");
    assert_d_eq(mallctl("arenas.bin.0.nregs", &result_ref, &len_result,
        NULL, 0), 0, "Unexpected mallctl() failure");
    expect_zu_eq(result, result_ref,
        "mallctlbymib() and mallctl() returned different results");
}
TEST_END

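/*
 * Exercise the internal ctl_bymibname() helper, which resolves a name suffix
 * relative to a mib prefix and performs the read in a single call; each
 * variant should return the same value as a plain mallctl() of the full name.
 */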
TEST_BEGIN(test_mallctlbymibname) {
    size_t mib[4];
    size_t miblen = 4;
    uint32_t result, result_ref;
    size_t len_result = sizeof(uint32_t);
    tsd_t *tsd = tsd_fetch();
    /* Error cases. */
    assert_d_eq(mallctlnametomib("arenas", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    assert_zu_eq(miblen, 1, "");
    miblen = 4;
    assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0", &miblen,
        &result, &len_result, NULL, 0), ENOENT, "");
    miblen = 4;
    assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0.bob", &miblen,
        &result, &len_result, NULL, 0), ENOENT, "");
    assert_zu_eq(miblen, 4, "");
    /* Valid cases. */
    assert_d_eq(mallctl("arenas.bin.0.nregs", &result_ref, &len_result,
        NULL, 0), 0, "Unexpected mallctl() failure");
    miblen = 4;
    assert_d_eq(ctl_bymibname(tsd, mib, 0, "arenas.bin.0.nregs", &miblen,
        &result, &len_result, NULL, 0), 0, "");
    assert_zu_eq(miblen, 4, "");
    expect_zu_eq(result, result_ref, "Unexpected result");
    assert_d_eq(ctl_bymibname(tsd, mib, 1, "bin.0.nregs", &miblen, &result,
        &len_result, NULL, 0), 0, "");
    assert_zu_eq(miblen, 4, "");
    expect_zu_eq(result, result_ref, "Unexpected result");
    assert_d_eq(ctl_bymibname(tsd, mib, 2, "0.nregs", &miblen, &result,
        &len_result, NULL, 0), 0, "");
    assert_zu_eq(miblen, 4, "");
    expect_zu_eq(result, result_ref, "Unexpected result");
    assert_d_eq(ctl_bymibname(tsd, mib, 3, "nregs", &miblen, &result,
        &len_result, NULL, 0), 0, "");
    assert_zu_eq(miblen, 4, "");
    expect_zu_eq(result, result_ref, "Unexpected result");
}
TEST_END

TEST_BEGIN(test_mallctl_config) {
#define TEST_MALLCTL_CONFIG(config, t) do { \
    t oldval; \
    size_t sz = sizeof(oldval); \
    expect_d_eq(mallctl("config."#config, (void *)&oldval, &sz, \
        NULL, 0), 0, "Unexpected mallctl() failure"); \
    expect_b_eq(oldval, config_##config, "Incorrect config value"); \
    expect_zu_eq(sz, sizeof(oldval), "Unexpected output size"); \
} while (0)
    TEST_MALLCTL_CONFIG(cache_oblivious, bool);
    TEST_MALLCTL_CONFIG(debug, bool);
    TEST_MALLCTL_CONFIG(fill, bool);
    TEST_MALLCTL_CONFIG(lazy_lock, bool);
    TEST_MALLCTL_CONFIG(malloc_conf, const char *);
    TEST_MALLCTL_CONFIG(prof, bool);
    TEST_MALLCTL_CONFIG(prof_libgcc, bool);
    TEST_MALLCTL_CONFIG(prof_libunwind, bool);
    TEST_MALLCTL_CONFIG(stats, bool);
    TEST_MALLCTL_CONFIG(utrace, bool);
    TEST_MALLCTL_CONFIG(xmalloc, bool);
#undef TEST_MALLCTL_CONFIG
}
TEST_END

TEST_BEGIN(test_mallctl_opt) {
    bool config_always = true;
#define TEST_MALLCTL_OPT(t, opt, config) do { \
    t oldval; \
    size_t sz = sizeof(oldval); \
    int expected = config_##config ? 0 : ENOENT; \
    int result = mallctl("opt."#opt, (void *)&oldval, &sz, NULL, \
        0); \
    expect_d_eq(result, expected, \
        "Unexpected mallctl() result for opt."#opt); \
    expect_zu_eq(sz, sizeof(oldval), "Unexpected output size"); \
} while (0)
    TEST_MALLCTL_OPT(bool, abort, always);
    TEST_MALLCTL_OPT(bool, abort_conf, always);
    TEST_MALLCTL_OPT(bool, cache_oblivious, always);
    TEST_MALLCTL_OPT(bool, trust_madvise, always);
    TEST_MALLCTL_OPT(bool, confirm_conf, always);
    TEST_MALLCTL_OPT(const char *, metadata_thp, always);
    TEST_MALLCTL_OPT(bool, retain, always);
    TEST_MALLCTL_OPT(const char *, dss, always);
    TEST_MALLCTL_OPT(bool, hpa, always);
    TEST_MALLCTL_OPT(size_t, hpa_slab_max_alloc, always);
    TEST_MALLCTL_OPT(size_t, hpa_sec_nshards, always);
    TEST_MALLCTL_OPT(size_t, hpa_sec_max_alloc, always);
    TEST_MALLCTL_OPT(size_t, hpa_sec_max_bytes, always);
    TEST_MALLCTL_OPT(size_t, hpa_sec_bytes_after_flush, always);
    TEST_MALLCTL_OPT(size_t, hpa_sec_batch_fill_extra, always);
    TEST_MALLCTL_OPT(unsigned, narenas, always);
    TEST_MALLCTL_OPT(const char *, percpu_arena, always);
    TEST_MALLCTL_OPT(size_t, oversize_threshold, always);
    TEST_MALLCTL_OPT(bool, background_thread, always);
    TEST_MALLCTL_OPT(ssize_t, dirty_decay_ms, always);
    TEST_MALLCTL_OPT(ssize_t, muzzy_decay_ms, always);
    TEST_MALLCTL_OPT(bool, stats_print, always);
    TEST_MALLCTL_OPT(const char *, stats_print_opts, always);
    TEST_MALLCTL_OPT(int64_t, stats_interval, always);
    TEST_MALLCTL_OPT(const char *, stats_interval_opts, always);
    TEST_MALLCTL_OPT(const char *, junk, fill);
    TEST_MALLCTL_OPT(bool, zero, fill);
    TEST_MALLCTL_OPT(bool, utrace, utrace);
    TEST_MALLCTL_OPT(bool, xmalloc, xmalloc);
    TEST_MALLCTL_OPT(bool, tcache, always);
    TEST_MALLCTL_OPT(size_t, lg_extent_max_active_fit, always);
    TEST_MALLCTL_OPT(size_t, tcache_max, always);
    TEST_MALLCTL_OPT(const char *, thp, always);
    TEST_MALLCTL_OPT(const char *, zero_realloc, always);
    TEST_MALLCTL_OPT(bool, prof, prof);
    TEST_MALLCTL_OPT(const char *, prof_prefix, prof);
    TEST_MALLCTL_OPT(bool, prof_active, prof);
    TEST_MALLCTL_OPT(ssize_t, lg_prof_sample, prof);
    TEST_MALLCTL_OPT(bool, prof_accum, prof);
    TEST_MALLCTL_OPT(ssize_t, lg_prof_interval, prof);
    TEST_MALLCTL_OPT(bool, prof_gdump, prof);
    TEST_MALLCTL_OPT(bool, prof_final, prof);
    TEST_MALLCTL_OPT(bool, prof_leak, prof);
    TEST_MALLCTL_OPT(bool, prof_leak_error, prof);
    TEST_MALLCTL_OPT(ssize_t, prof_recent_alloc_max, prof);
    TEST_MALLCTL_OPT(bool, prof_stats, prof);
    TEST_MALLCTL_OPT(bool, prof_sys_thread_name, prof);
    TEST_MALLCTL_OPT(ssize_t, lg_san_uaf_align, uaf_detection);
#undef TEST_MALLCTL_OPT
}
TEST_END

TEST_BEGIN(test_manpage_example) {
    unsigned nbins, i;
    size_t mib[4];
    size_t len, miblen;
    len = sizeof(nbins);
    expect_d_eq(mallctl("arenas.nbins", (void *)&nbins, &len, NULL, 0), 0,
        "Unexpected mallctl() failure");
    miblen = 4;
    expect_d_eq(mallctlnametomib("arenas.bin.0.size", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    for (i = 0; i < nbins; i++) {
        size_t bin_size;
        mib[2] = i;
        len = sizeof(bin_size);
        expect_d_eq(mallctlbymib(mib, miblen, (void *)&bin_size, &len,
            NULL, 0), 0, "Unexpected mallctlbymib() failure");
        /* Do something with bin_size... */
    }
}
TEST_END

TEST_BEGIN(test_tcache_none) {
    test_skip_if(!opt_tcache);
    /* Allocate p and q. */
    void *p0 = mallocx(42, 0);
    expect_ptr_not_null(p0, "Unexpected mallocx() failure");
    void *q = mallocx(42, 0);
    expect_ptr_not_null(q, "Unexpected mallocx() failure");
    /* Deallocate p and q, but bypass the tcache for q. */
    dallocx(p0, 0);
    dallocx(q, MALLOCX_TCACHE_NONE);
    /* Make sure that tcache-based allocation returns p, not q. */
    void *p1 = mallocx(42, 0);
    expect_ptr_not_null(p1, "Unexpected mallocx() failure");
    if (!opt_prof && !san_uaf_detection_enabled()) {
        expect_ptr_eq(p0, p1,
            "Expected tcache to allocate cached region");
    }
    /* Clean up. */
    dallocx(p1, MALLOCX_TCACHE_NONE);
}
TEST_END

TEST_BEGIN(test_tcache) {
#define NTCACHES 10
    unsigned tis[NTCACHES];
    void *ps[NTCACHES];
    void *qs[NTCACHES];
    unsigned i;
    size_t sz, psz, qsz;
    psz = 42;
    qsz = nallocx(psz, 0) + 1;
    /* Create tcaches. */
    for (i = 0; i < NTCACHES; i++) {
        sz = sizeof(unsigned);
        expect_d_eq(mallctl("tcache.create", (void *)&tis[i], &sz, NULL,
            0), 0, "Unexpected mallctl() failure, i=%u", i);
    }
    /* Exercise tcache ID recycling. */
    for (i = 0; i < NTCACHES; i++) {
        expect_d_eq(mallctl("tcache.destroy", NULL, NULL,
            (void *)&tis[i], sizeof(unsigned)), 0,
            "Unexpected mallctl() failure, i=%u", i);
    }
    for (i = 0; i < NTCACHES; i++) {
        sz = sizeof(unsigned);
        expect_d_eq(mallctl("tcache.create", (void *)&tis[i], &sz, NULL,
            0), 0, "Unexpected mallctl() failure, i=%u", i);
    }
    /* Flush empty tcaches. */
    for (i = 0; i < NTCACHES; i++) {
        expect_d_eq(mallctl("tcache.flush", NULL, NULL, (void *)&tis[i],
            sizeof(unsigned)), 0, "Unexpected mallctl() failure, i=%u",
            i);
    }
    /* Cache some allocations. */
    for (i = 0; i < NTCACHES; i++) {
        ps[i] = mallocx(psz, MALLOCX_TCACHE(tis[i]));
        expect_ptr_not_null(ps[i], "Unexpected mallocx() failure, i=%u",
            i);
        dallocx(ps[i], MALLOCX_TCACHE(tis[i]));
        qs[i] = mallocx(qsz, MALLOCX_TCACHE(tis[i]));
        expect_ptr_not_null(qs[i], "Unexpected mallocx() failure, i=%u",
            i);
        dallocx(qs[i], MALLOCX_TCACHE(tis[i]));
    }
    /* Verify that tcaches allocate cached regions. */
    for (i = 0; i < NTCACHES; i++) {
        void *p0 = ps[i];
        ps[i] = mallocx(psz, MALLOCX_TCACHE(tis[i]));
        expect_ptr_not_null(ps[i], "Unexpected mallocx() failure, i=%u",
            i);
        if (!san_uaf_detection_enabled()) {
            expect_ptr_eq(ps[i], p0, "Expected mallocx() to "
                "allocate cached region, i=%u", i);
        }
    }
    /* Verify that reallocation uses cached regions. */
    for (i = 0; i < NTCACHES; i++) {
        void *q0 = qs[i];
        qs[i] = rallocx(ps[i], qsz, MALLOCX_TCACHE(tis[i]));
        expect_ptr_not_null(qs[i], "Unexpected rallocx() failure, i=%u",
            i);
        if (!san_uaf_detection_enabled()) {
            expect_ptr_eq(qs[i], q0, "Expected rallocx() to "
                "allocate cached region, i=%u", i);
        }
        /* Avoid undefined behavior in case of test failure. */
        if (qs[i] == NULL) {
            qs[i] = ps[i];
        }
    }
    for (i = 0; i < NTCACHES; i++) {
        dallocx(qs[i], MALLOCX_TCACHE(tis[i]));
    }
    /* Flush some non-empty tcaches. */
    for (i = 0; i < NTCACHES/2; i++) {
        expect_d_eq(mallctl("tcache.flush", NULL, NULL, (void *)&tis[i],
            sizeof(unsigned)), 0, "Unexpected mallctl() failure, i=%u",
            i);
    }
    /* Destroy tcaches. */
    for (i = 0; i < NTCACHES; i++) {
        expect_d_eq(mallctl("tcache.destroy", NULL, NULL,
            (void *)&tis[i], sizeof(unsigned)), 0,
            "Unexpected mallctl() failure, i=%u", i);
    }
}
TEST_END

TEST_BEGIN(test_thread_arena) {
    unsigned old_arena_ind, new_arena_ind, narenas;
    const char *opa;
    size_t sz = sizeof(opa);
    expect_d_eq(mallctl("opt.percpu_arena", (void *)&opa, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    sz = sizeof(unsigned);
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
        0, "Unexpected mallctl() failure");
    if (opt_oversize_threshold != 0) {
        narenas--;
    }
    expect_u_eq(narenas, opt_narenas, "Number of arenas incorrect");
    if (strcmp(opa, "disabled") == 0) {
        new_arena_ind = narenas - 1;
        expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
            (void *)&new_arena_ind, sizeof(unsigned)), 0,
            "Unexpected mallctl() failure");
        new_arena_ind = 0;
        expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
            (void *)&new_arena_ind, sizeof(unsigned)), 0,
            "Unexpected mallctl() failure");
    } else {
        expect_d_eq(mallctl("thread.arena", (void *)&old_arena_ind, &sz,
            NULL, 0), 0, "Unexpected mallctl() failure");
        new_arena_ind = percpu_arena_ind_limit(opt_percpu_arena) - 1;
        if (old_arena_ind != new_arena_ind) {
            expect_d_eq(mallctl("thread.arena",
                (void *)&old_arena_ind, &sz, (void *)&new_arena_ind,
                sizeof(unsigned)), EPERM, "thread.arena ctl "
                "should not be allowed with percpu arena");
        }
    }
}
TEST_END

TEST_BEGIN(test_arena_i_initialized) {
    unsigned narenas, i;
    size_t sz;
    size_t mib[3];
    size_t miblen = sizeof(mib) / sizeof(size_t);
    bool initialized;
    sz = sizeof(narenas);
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
        0, "Unexpected mallctl() failure");
    expect_d_eq(mallctlnametomib("arena.0.initialized", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    for (i = 0; i < narenas; i++) {
        mib[1] = i;
        sz = sizeof(initialized);
        expect_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL,
            0), 0, "Unexpected mallctl() failure");
    }
    mib[1] = MALLCTL_ARENAS_ALL;
    sz = sizeof(initialized);
    expect_d_eq(mallctlbymib(mib, miblen, &initialized, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_true(initialized,
        "Merged arena statistics should always be initialized");
    /* Equivalent to the above but using mallctl() directly. */
    sz = sizeof(initialized);
    expect_d_eq(mallctl(
        "arena." STRINGIFY(MALLCTL_ARENAS_ALL) ".initialized",
        (void *)&initialized, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_true(initialized,
        "Merged arena statistics should always be initialized");
}
TEST_END

TEST_BEGIN(test_arena_i_dirty_decay_ms) {
    ssize_t dirty_decay_ms, orig_dirty_decay_ms, prev_dirty_decay_ms;
    size_t sz = sizeof(ssize_t);
    expect_d_eq(mallctl("arena.0.dirty_decay_ms",
        (void *)&orig_dirty_decay_ms, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    dirty_decay_ms = -2;
    expect_d_eq(mallctl("arena.0.dirty_decay_ms", NULL, NULL,
        (void *)&dirty_decay_ms, sizeof(ssize_t)), EFAULT,
        "Unexpected mallctl() success");
    dirty_decay_ms = 0x7fffffff;
    expect_d_eq(mallctl("arena.0.dirty_decay_ms", NULL, NULL,
        (void *)&dirty_decay_ms, sizeof(ssize_t)), 0,
        "Unexpected mallctl() failure");
    for (prev_dirty_decay_ms = dirty_decay_ms, dirty_decay_ms = -1;
        dirty_decay_ms < 20; prev_dirty_decay_ms = dirty_decay_ms,
        dirty_decay_ms++) {
        ssize_t old_dirty_decay_ms;
        expect_d_eq(mallctl("arena.0.dirty_decay_ms",
            (void *)&old_dirty_decay_ms, &sz, (void *)&dirty_decay_ms,
            sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
        expect_zd_eq(old_dirty_decay_ms, prev_dirty_decay_ms,
            "Unexpected old arena.0.dirty_decay_ms");
    }
}
TEST_END

TEST_BEGIN(test_arena_i_muzzy_decay_ms) {
    ssize_t muzzy_decay_ms, orig_muzzy_decay_ms, prev_muzzy_decay_ms;
    size_t sz = sizeof(ssize_t);
    expect_d_eq(mallctl("arena.0.muzzy_decay_ms",
        (void *)&orig_muzzy_decay_ms, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    muzzy_decay_ms = -2;
    expect_d_eq(mallctl("arena.0.muzzy_decay_ms", NULL, NULL,
        (void *)&muzzy_decay_ms, sizeof(ssize_t)), EFAULT,
        "Unexpected mallctl() success");
    muzzy_decay_ms = 0x7fffffff;
    expect_d_eq(mallctl("arena.0.muzzy_decay_ms", NULL, NULL,
        (void *)&muzzy_decay_ms, sizeof(ssize_t)), 0,
        "Unexpected mallctl() failure");
    for (prev_muzzy_decay_ms = muzzy_decay_ms, muzzy_decay_ms = -1;
        muzzy_decay_ms < 20; prev_muzzy_decay_ms = muzzy_decay_ms,
        muzzy_decay_ms++) {
        ssize_t old_muzzy_decay_ms;
        expect_d_eq(mallctl("arena.0.muzzy_decay_ms",
            (void *)&old_muzzy_decay_ms, &sz, (void *)&muzzy_decay_ms,
            sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
        expect_zd_eq(old_muzzy_decay_ms, prev_muzzy_decay_ms,
            "Unexpected old arena.0.muzzy_decay_ms");
    }
}
TEST_END

TEST_BEGIN(test_arena_i_purge) {
    unsigned narenas;
    size_t sz = sizeof(unsigned);
    size_t mib[3];
    size_t miblen = 3;
    expect_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
        0, "Unexpected mallctl() failure");
    expect_d_eq(mallctlnametomib("arena.0.purge", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    mib[1] = narenas;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
        "Unexpected mallctlbymib() failure");
    mib[1] = MALLCTL_ARENAS_ALL;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
        "Unexpected mallctlbymib() failure");
}
TEST_END

TEST_BEGIN(test_arena_i_decay) {
    unsigned narenas;
    size_t sz = sizeof(unsigned);
    size_t mib[3];
    size_t miblen = 3;
    expect_d_eq(mallctl("arena.0.decay", NULL, NULL, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas, &sz, NULL, 0),
        0, "Unexpected mallctl() failure");
    expect_d_eq(mallctlnametomib("arena.0.decay", mib, &miblen), 0,
        "Unexpected mallctlnametomib() failure");
    mib[1] = narenas;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
        "Unexpected mallctlbymib() failure");
    mib[1] = MALLCTL_ARENAS_ALL;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0), 0,
        "Unexpected mallctlbymib() failure");
}
TEST_END

TEST_BEGIN(test_arena_i_dss) {
    const char *dss_prec_old, *dss_prec_new;
    size_t sz = sizeof(dss_prec_old);
    size_t mib[3];
    size_t miblen;
    miblen = sizeof(mib)/sizeof(size_t);
    expect_d_eq(mallctlnametomib("arena.0.dss", mib, &miblen), 0,
        "Unexpected mallctlnametomib() error");
    dss_prec_new = "disabled";
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz,
        (void *)&dss_prec_new, sizeof(dss_prec_new)), 0,
        "Unexpected mallctl() failure");
    expect_str_ne(dss_prec_old, "primary",
        "Unexpected default for dss precedence");
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_new, &sz,
        (void *)&dss_prec_old, sizeof(dss_prec_old)), 0,
        "Unexpected mallctl() failure");
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz, NULL,
        0), 0, "Unexpected mallctl() failure");
    expect_str_ne(dss_prec_old, "primary",
        "Unexpected value for dss precedence");
    mib[1] = narenas_total_get();
    dss_prec_new = "disabled";
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz,
        (void *)&dss_prec_new, sizeof(dss_prec_new)), 0,
        "Unexpected mallctl() failure");
    expect_str_ne(dss_prec_old, "primary",
        "Unexpected default for dss precedence");
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_new, &sz,
        (void *)&dss_prec_old, sizeof(dss_prec_new)), 0,
        "Unexpected mallctl() failure");
    expect_d_eq(mallctlbymib(mib, miblen, (void *)&dss_prec_old, &sz, NULL,
        0), 0, "Unexpected mallctl() failure");
    expect_str_ne(dss_prec_old, "primary",
        "Unexpected value for dss precedence");
}
TEST_END

TEST_BEGIN(test_arena_i_retain_grow_limit) {
    size_t old_limit, new_limit, default_limit;
    size_t mib[3];
    size_t miblen;
    bool retain_enabled;
    size_t sz = sizeof(retain_enabled);
    expect_d_eq(mallctl("opt.retain", &retain_enabled, &sz, NULL, 0),
        0, "Unexpected mallctl() failure");
    test_skip_if(!retain_enabled);
    sz = sizeof(default_limit);
    miblen = sizeof(mib)/sizeof(size_t);
    expect_d_eq(mallctlnametomib("arena.0.retain_grow_limit", mib, &miblen),
        0, "Unexpected mallctlnametomib() error");
    expect_d_eq(mallctlbymib(mib, miblen, &default_limit, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(default_limit, SC_LARGE_MAXCLASS,
        "Unexpected default for retain_grow_limit");
    new_limit = PAGE - 1;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
        sizeof(new_limit)), EFAULT, "Unexpected mallctl() success");
    new_limit = PAGE + 1;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
        sizeof(new_limit)), 0, "Unexpected mallctl() failure");
    expect_d_eq(mallctlbymib(mib, miblen, &old_limit, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(old_limit, PAGE,
        "Unexpected value for retain_grow_limit");
    /* Expect grow less than psize class 10. */
    new_limit = sz_pind2sz(10) - 1;
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &new_limit,
        sizeof(new_limit)), 0, "Unexpected mallctl() failure");
    expect_d_eq(mallctlbymib(mib, miblen, &old_limit, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_zu_eq(old_limit, sz_pind2sz(9),
        "Unexpected value for retain_grow_limit");
    /* Restore to default. */
    expect_d_eq(mallctlbymib(mib, miblen, NULL, NULL, &default_limit,
        sizeof(default_limit)), 0, "Unexpected mallctl() failure");
}
TEST_END

TEST_BEGIN(test_arenas_dirty_decay_ms) {
    ssize_t dirty_decay_ms, orig_dirty_decay_ms, prev_dirty_decay_ms;
    size_t sz = sizeof(ssize_t);
    expect_d_eq(mallctl("arenas.dirty_decay_ms",
        (void *)&orig_dirty_decay_ms, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    dirty_decay_ms = -2;
    expect_d_eq(mallctl("arenas.dirty_decay_ms", NULL, NULL,
        (void *)&dirty_decay_ms, sizeof(ssize_t)), EFAULT,
        "Unexpected mallctl() success");
    dirty_decay_ms = 0x7fffffff;
    expect_d_eq(mallctl("arenas.dirty_decay_ms", NULL, NULL,
        (void *)&dirty_decay_ms, sizeof(ssize_t)), 0,
        "Unexpected mallctl() failure");
    for (prev_dirty_decay_ms = dirty_decay_ms, dirty_decay_ms = -1;
        dirty_decay_ms < 20; prev_dirty_decay_ms = dirty_decay_ms,
        dirty_decay_ms++) {
        ssize_t old_dirty_decay_ms;
        expect_d_eq(mallctl("arenas.dirty_decay_ms",
            (void *)&old_dirty_decay_ms, &sz, (void *)&dirty_decay_ms,
            sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
        expect_zd_eq(old_dirty_decay_ms, prev_dirty_decay_ms,
            "Unexpected old arenas.dirty_decay_ms");
    }
}
TEST_END

TEST_BEGIN(test_arenas_muzzy_decay_ms) {
    ssize_t muzzy_decay_ms, orig_muzzy_decay_ms, prev_muzzy_decay_ms;
    size_t sz = sizeof(ssize_t);
    expect_d_eq(mallctl("arenas.muzzy_decay_ms",
        (void *)&orig_muzzy_decay_ms, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    muzzy_decay_ms = -2;
    expect_d_eq(mallctl("arenas.muzzy_decay_ms", NULL, NULL,
        (void *)&muzzy_decay_ms, sizeof(ssize_t)), EFAULT,
        "Unexpected mallctl() success");
    muzzy_decay_ms = 0x7fffffff;
    expect_d_eq(mallctl("arenas.muzzy_decay_ms", NULL, NULL,
        (void *)&muzzy_decay_ms, sizeof(ssize_t)), 0,
        "Unexpected mallctl() failure");
    for (prev_muzzy_decay_ms = muzzy_decay_ms, muzzy_decay_ms = -1;
        muzzy_decay_ms < 20; prev_muzzy_decay_ms = muzzy_decay_ms,
        muzzy_decay_ms++) {
        ssize_t old_muzzy_decay_ms;
        expect_d_eq(mallctl("arenas.muzzy_decay_ms",
            (void *)&old_muzzy_decay_ms, &sz, (void *)&muzzy_decay_ms,
            sizeof(ssize_t)), 0, "Unexpected mallctl() failure");
        expect_zd_eq(old_muzzy_decay_ms, prev_muzzy_decay_ms,
            "Unexpected old arenas.muzzy_decay_ms");
    }
}
TEST_END

TEST_BEGIN(test_arenas_constants) {
#define TEST_ARENAS_CONSTANT(t, name, expected) do { \
    t name; \
    size_t sz = sizeof(t); \
    expect_d_eq(mallctl("arenas."#name, (void *)&name, &sz, NULL, \
        0), 0, "Unexpected mallctl() failure"); \
    expect_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
    TEST_ARENAS_CONSTANT(size_t, quantum, QUANTUM);
    TEST_ARENAS_CONSTANT(size_t, page, PAGE);
    TEST_ARENAS_CONSTANT(unsigned, nbins, SC_NBINS);
    TEST_ARENAS_CONSTANT(unsigned, nlextents, SC_NSIZES - SC_NBINS);
#undef TEST_ARENAS_CONSTANT
}
TEST_END

TEST_BEGIN(test_arenas_bin_constants) {
#define TEST_ARENAS_BIN_CONSTANT(t, name, expected) do { \
    t name; \
    size_t sz = sizeof(t); \
    expect_d_eq(mallctl("arenas.bin.0."#name, (void *)&name, &sz, \
        NULL, 0), 0, "Unexpected mallctl() failure"); \
    expect_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
    TEST_ARENAS_BIN_CONSTANT(size_t, size, bin_infos[0].reg_size);
    TEST_ARENAS_BIN_CONSTANT(uint32_t, nregs, bin_infos[0].nregs);
    TEST_ARENAS_BIN_CONSTANT(size_t, slab_size,
        bin_infos[0].slab_size);
    TEST_ARENAS_BIN_CONSTANT(uint32_t, nshards, bin_infos[0].n_shards);
#undef TEST_ARENAS_BIN_CONSTANT
}
TEST_END

TEST_BEGIN(test_arenas_lextent_constants) {
#define TEST_ARENAS_LEXTENT_CONSTANT(t, name, expected) do { \
    t name; \
    size_t sz = sizeof(t); \
    expect_d_eq(mallctl("arenas.lextent.0."#name, (void *)&name, \
        &sz, NULL, 0), 0, "Unexpected mallctl() failure"); \
    expect_zu_eq(name, expected, "Incorrect "#name" size"); \
} while (0)
    TEST_ARENAS_LEXTENT_CONSTANT(size_t, size,
        SC_LARGE_MINCLASS);
#undef TEST_ARENAS_LEXTENT_CONSTANT
}
TEST_END

TEST_BEGIN(test_arenas_create) {
    unsigned narenas_before, arena, narenas_after;
    size_t sz = sizeof(unsigned);
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas_before, &sz,
        NULL, 0), 0, "Unexpected mallctl() failure");
    expect_d_eq(mallctl("arenas.create", (void *)&arena, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    expect_d_eq(mallctl("arenas.narenas", (void *)&narenas_after, &sz, NULL,
        0), 0, "Unexpected mallctl() failure");
    expect_u_eq(narenas_before+1, narenas_after,
        "Unexpected number of arenas before versus after extension");
    expect_u_eq(arena, narenas_after-1, "Unexpected arena index");
}
TEST_END

TEST_BEGIN(test_arenas_lookup) {
    unsigned arena, arena1;
    void *ptr;
    size_t sz = sizeof(unsigned);
    expect_d_eq(mallctl("arenas.create", (void *)&arena, &sz, NULL, 0), 0,
        "Unexpected mallctl() failure");
    ptr = mallocx(42, MALLOCX_ARENA(arena) | MALLOCX_TCACHE_NONE);
    expect_ptr_not_null(ptr, "Unexpected mallocx() failure");
    expect_d_eq(mallctl("arenas.lookup", &arena1, &sz, &ptr, sizeof(ptr)),
        0, "Unexpected mallctl() failure");
    expect_u_eq(arena, arena1, "Unexpected arena index");
    dallocx(ptr, 0);
}
TEST_END

TEST_BEGIN(test_prof_active) {
    /*
     * If config_prof is off, then the test for prof_active in
     * test_mallctl_opt was already enough.
     */
    test_skip_if(!config_prof);
    test_skip_if(opt_prof);
    bool active, old;
    size_t len = sizeof(bool);
    active = true;
    expect_d_eq(mallctl("prof.active", NULL, NULL, &active, len), ENOENT,
        "Setting prof_active to true should fail when opt_prof is off");
    old = true;
    expect_d_eq(mallctl("prof.active", &old, &len, &active, len), ENOENT,
        "Setting prof_active to true should fail when opt_prof is off");
    expect_true(old, "old value should not be touched when mallctl fails");
    active = false;
    expect_d_eq(mallctl("prof.active", NULL, NULL, &active, len), 0,
        "Setting prof_active to false should succeed when opt_prof is off");
    expect_d_eq(mallctl("prof.active", &old, &len, &active, len), 0,
        "Setting prof_active to false should succeed when opt_prof is off");
    expect_false(old, "prof_active should be false when opt_prof is off");
}
TEST_END

TEST_BEGIN(test_stats_arenas) {
#define TEST_STATS_ARENAS(t, name) do { \
    t name; \
    size_t sz = sizeof(t); \
    expect_d_eq(mallctl("stats.arenas.0."#name, (void *)&name, &sz, \
        NULL, 0), 0, "Unexpected mallctl() failure"); \
} while (0)
    TEST_STATS_ARENAS(unsigned, nthreads);
    TEST_STATS_ARENAS(const char *, dss);
    TEST_STATS_ARENAS(ssize_t, dirty_decay_ms);
    TEST_STATS_ARENAS(ssize_t, muzzy_decay_ms);
    TEST_STATS_ARENAS(size_t, pactive);
    TEST_STATS_ARENAS(size_t, pdirty);
#undef TEST_STATS_ARENAS
}
TEST_END

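/*
 * Minimal allocation/deallocation hooks: each invocation simply records that
 * it ran by setting the caller-provided flag passed via the extra pointer.
 */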
static void
alloc_hook(void *extra, UNUSED hook_alloc_t type, UNUSED void *result,
    UNUSED uintptr_t result_raw, UNUSED uintptr_t args_raw[3]) {
    *(bool *)extra = true;
}

static void
dalloc_hook(void *extra, UNUSED hook_dalloc_t type,
    UNUSED void *address, UNUSED uintptr_t args_raw[3]) {
    *(bool *)extra = true;
}

TEST_BEGIN(test_hooks) {
    bool hook_called = false;
    hooks_t hooks = {&alloc_hook, &dalloc_hook, NULL, &hook_called};
    void *handle = NULL;
    size_t sz = sizeof(handle);
    int err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
        sizeof(hooks));
    expect_d_eq(err, 0, "Hook installation failed");
    expect_ptr_ne(handle, NULL, "Hook installation gave null handle");
    void *ptr = mallocx(1, 0);
    expect_true(hook_called, "Alloc hook not called");
    hook_called = false;
    free(ptr);
    expect_true(hook_called, "Free hook not called");
    err = mallctl("experimental.hooks.remove", NULL, NULL, &handle,
        sizeof(handle));
    expect_d_eq(err, 0, "Hook removal failed");
    hook_called = false;
    ptr = mallocx(1, 0);
    free(ptr);
    expect_false(hook_called, "Hook called after removal");
}
TEST_END

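/*
 * Fill the hook table up to HOOK_MAX entries, verify that one more install
 * fails with EAGAIN, then confirm that removing hooks frees up slots again.
 */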
TEST_BEGIN(test_hooks_exhaustion) {
    bool hook_called = false;
    hooks_t hooks = {&alloc_hook, &dalloc_hook, NULL, &hook_called};
    void *handle;
    void *handles[HOOK_MAX];
    size_t sz = sizeof(handle);
    int err;
    for (int i = 0; i < HOOK_MAX; i++) {
        handle = NULL;
        err = mallctl("experimental.hooks.install", &handle, &sz,
            &hooks, sizeof(hooks));
        expect_d_eq(err, 0, "Error installing hooks");
        expect_ptr_ne(handle, NULL, "Got NULL handle");
        handles[i] = handle;
    }
    err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
        sizeof(hooks));
    expect_d_eq(err, EAGAIN, "Should have failed hook installation");
    for (int i = 0; i < HOOK_MAX; i++) {
        err = mallctl("experimental.hooks.remove", NULL, NULL,
            &handles[i], sizeof(handles[i]));
        expect_d_eq(err, 0, "Hook removal failed");
    }
    /* Insertion failed, but then we removed some; it should work now. */
    handle = NULL;
    err = mallctl("experimental.hooks.install", &handle, &sz, &hooks,
        sizeof(hooks));
    expect_d_eq(err, 0, "Hook insertion failed");
    expect_ptr_ne(handle, NULL, "Got NULL handle");
    err = mallctl("experimental.hooks.remove", NULL, NULL, &handle,
        sizeof(handle));
    expect_d_eq(err, 0, "Hook removal failed");
}
TEST_END

TEST_BEGIN(test_thread_idle) {
    /*
     * We're cheating a little bit in this test, and inferring things about
     * implementation internals (like tcache details). We have to;
     * thread.idle has no guaranteed effects. We need stats to make these
     * inferences.
     */
    test_skip_if(!config_stats);
    int err;
    size_t sz;
    size_t miblen;
    bool tcache_enabled = false;
    sz = sizeof(tcache_enabled);
    err = mallctl("thread.tcache.enabled", &tcache_enabled, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    test_skip_if(!tcache_enabled);
    size_t tcache_max;
    sz = sizeof(tcache_max);
    err = mallctl("arenas.tcache_max", &tcache_max, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    test_skip_if(tcache_max == 0);
    unsigned arena_ind;
    sz = sizeof(arena_ind);
    err = mallctl("thread.arena", &arena_ind, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    /* We're going to do an allocation of size 1, which we know is small. */
    size_t mib[5];
    miblen = sizeof(mib)/sizeof(mib[0]);
    err = mallctlnametomib("stats.arenas.0.small.ndalloc", mib, &miblen);
    expect_d_eq(err, 0, "");
    mib[2] = arena_ind;
    /*
     * This alloc and dalloc should leave something in the tcache, in a
     * small size's cache bin.
     */
    void *ptr = mallocx(1, 0);
    dallocx(ptr, 0);
    uint64_t epoch;
    err = mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch));
    expect_d_eq(err, 0, "");
    uint64_t small_dalloc_pre_idle;
    sz = sizeof(small_dalloc_pre_idle);
    err = mallctlbymib(mib, miblen, &small_dalloc_pre_idle, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    err = mallctl("thread.idle", NULL, NULL, NULL, 0);
    expect_d_eq(err, 0, "");
    err = mallctl("epoch", NULL, NULL, &epoch, sizeof(epoch));
    expect_d_eq(err, 0, "");
    uint64_t small_dalloc_post_idle;
    sz = sizeof(small_dalloc_post_idle);
    err = mallctlbymib(mib, miblen, &small_dalloc_post_idle, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    expect_u64_lt(small_dalloc_pre_idle, small_dalloc_post_idle,
        "Purge didn't flush the tcache");
}
TEST_END

TEST_BEGIN(test_thread_peak) {
    test_skip_if(!config_stats);
    /*
     * We don't commit to any stable amount of accuracy for peak tracking
     * (in practice, when this test was written, we made sure to be within
     * 100k). But 10MB is big for more or less any definition of big.
     */
    size_t big_size = 10 * 1024 * 1024;
    size_t small_size = 256;
    void *ptr;
    int err;
    size_t sz;
    uint64_t peak;
    sz = sizeof(uint64_t);
    err = mallctl("thread.peak.reset", NULL, NULL, NULL, 0);
    expect_d_eq(err, 0, "");
    ptr = mallocx(SC_SMALL_MAXCLASS, 0);
    err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    expect_u64_eq(peak, SC_SMALL_MAXCLASS, "Missed an update");
    free(ptr);
    err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    expect_u64_eq(peak, SC_SMALL_MAXCLASS, "Freeing changed peak");
    ptr = mallocx(big_size, 0);
    free(ptr);
    /*
     * The peak should have hit big_size in the last two lines, even though
     * the net allocated bytes has since dropped back down to zero. We
     * should have noticed the peak change without having done any mallctl
     * calls while net allocated bytes was high.
     */
    err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    expect_u64_ge(peak, big_size, "Missed a peak change.");
    /* Allocate big_size, but using small allocations. */
    size_t nallocs = big_size / small_size;
    void **ptrs = calloc(nallocs, sizeof(void *));
    err = mallctl("thread.peak.reset", NULL, NULL, NULL, 0);
    expect_d_eq(err, 0, "");
    err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    expect_u64_eq(0, peak, "Missed a reset.");
    for (size_t i = 0; i < nallocs; i++) {
        ptrs[i] = mallocx(small_size, 0);
    }
    for (size_t i = 0; i < nallocs; i++) {
        free(ptrs[i]);
    }
    err = mallctl("thread.peak.read", &peak, &sz, NULL, 0);
    expect_d_eq(err, 0, "");
    /*
     * We don't guarantee exactness; make sure we're within 10% of the peak,
     * though.
     */
    expect_u64_ge(peak, nallocx(small_size, 0) * nallocs * 9 / 10,
        "Missed some peak changes.");
    expect_u64_le(peak, nallocx(small_size, 0) * nallocs * 11 / 10,
        "Overcounted peak changes.");
    free(ptrs);
}
TEST_END

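/*
 * The activity callback test below installs a callback that simply records
 * the allocated/deallocated byte totals it is handed, so they can be compared
 * against the counters exposed via thread.allocatedp/thread.deallocatedp.
 */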
typedef struct activity_test_data_s activity_test_data_t;
struct activity_test_data_s {
    uint64_t obtained_alloc;
    uint64_t obtained_dalloc;
};

static void
activity_test_callback(void *uctx, uint64_t alloc, uint64_t dalloc) {
    activity_test_data_t *test_data = (activity_test_data_t *)uctx;
    test_data->obtained_alloc = alloc;
    test_data->obtained_dalloc = dalloc;
}

TEST_BEGIN(test_thread_activity_callback) {
    test_skip_if(!config_stats);
    const size_t big_size = 10 * 1024 * 1024;
    void *ptr;
    int err;
    size_t sz;
    uint64_t *allocatedp;
    uint64_t *deallocatedp;
    sz = sizeof(allocatedp);
    err = mallctl("thread.allocatedp", &allocatedp, &sz, NULL, 0);
    assert_d_eq(0, err, "");
    err = mallctl("thread.deallocatedp", &deallocatedp, &sz, NULL, 0);
    assert_d_eq(0, err, "");
    activity_callback_thunk_t old_thunk = {(activity_callback_t)111,
        (void *)222};
    activity_test_data_t test_data = {333, 444};
    activity_callback_thunk_t new_thunk =
        {&activity_test_callback, &test_data};
    sz = sizeof(old_thunk);
    err = mallctl("experimental.thread.activity_callback", &old_thunk, &sz,
        &new_thunk, sizeof(new_thunk));
    assert_d_eq(0, err, "");
    expect_true(old_thunk.callback == NULL, "Callback already installed");
    expect_true(old_thunk.uctx == NULL, "Callback data already installed");
    ptr = mallocx(big_size, 0);
    expect_u64_eq(test_data.obtained_alloc, *allocatedp, "");
    expect_u64_eq(test_data.obtained_dalloc, *deallocatedp, "");
    free(ptr);
    expect_u64_eq(test_data.obtained_alloc, *allocatedp, "");
    expect_u64_eq(test_data.obtained_dalloc, *deallocatedp, "");
    sz = sizeof(old_thunk);
    new_thunk = (activity_callback_thunk_t){ NULL, NULL };
    err = mallctl("experimental.thread.activity_callback", &old_thunk, &sz,
        &new_thunk, sizeof(new_thunk));
    assert_d_eq(0, err, "");
    expect_true(old_thunk.callback == &activity_test_callback, "");
    expect_true(old_thunk.uctx == &test_data, "");
    /* Inserting NULL should have turned off tracking. */
    test_data.obtained_alloc = 333;
    test_data.obtained_dalloc = 444;
    ptr = mallocx(big_size, 0);
    free(ptr);
    expect_u64_eq(333, test_data.obtained_alloc, "");
    expect_u64_eq(444, test_data.obtained_dalloc, "");
}
TEST_END

int
main(void) {
    return test(
        test_mallctl_errors,
        test_mallctlnametomib_errors,
        test_mallctlbymib_errors,
        test_mallctl_read_write,
        test_mallctlnametomib_short_mib,
        test_mallctlnametomib_short_name,
        test_mallctlmibnametomib,
        test_mallctlbymibname,
        test_mallctl_config,
        test_mallctl_opt,
        test_manpage_example,
        test_tcache_none,
        test_tcache,
        test_thread_arena,
        test_arena_i_initialized,
        test_arena_i_dirty_decay_ms,
        test_arena_i_muzzy_decay_ms,
        test_arena_i_purge,
        test_arena_i_decay,
        test_arena_i_dss,
        test_arena_i_retain_grow_limit,
        test_arenas_dirty_decay_ms,
        test_arenas_muzzy_decay_ms,
        test_arenas_constants,
        test_arenas_bin_constants,
        test_arenas_lextent_constants,
        test_arenas_create,
        test_arenas_lookup,
        test_prof_active,
        test_stats_arenas,
        test_hooks,
        test_hooks_exhaustion,
        test_thread_idle,
        test_thread_peak,
        test_thread_activity_callback);
}