zend_gc.c

/*
   +----------------------------------------------------------------------+
   | Zend Engine                                                          |
   +----------------------------------------------------------------------+
   | Copyright (c) 1998-2018 Zend Technologies Ltd. (http://www.zend.com) |
   +----------------------------------------------------------------------+
   | This source file is subject to version 2.00 of the Zend license,     |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.zend.com/license/2_00.txt.                                |
   | If you did not receive a copy of the Zend license and are unable to  |
   | obtain it through the world-wide-web, please send a note to          |
   | license@zend.com so we can mail you a copy immediately.              |
   +----------------------------------------------------------------------+
   | Authors: David Wang <planetbeing@gmail.com>                          |
   |          Dmitry Stogov <dmitry@php.net>                              |
   +----------------------------------------------------------------------+
*/
/**
 * zend_gc_collect_cycles
 * ======================
 *
 * Colors and their meaning
 * ------------------------
 *
 * BLACK  (GC_BLACK)  - In use or free.
 * GREY   (GC_GREY)   - Possible member of a cycle.
 * WHITE  (GC_WHITE)  - Member of a garbage cycle.
 * PURPLE (GC_PURPLE) - Possible root of a cycle.
 *
 * Colors described in the paper but not used here
 * -----------------------------------------------
 *
 * GREEN  - Acyclic.
 * RED    - Candidate cycle undergoing Σ-computation.
 * ORANGE - Candidate cycle awaiting epoch boundary.
 *
 *
 * Flow
 * ====
 *
 * The garbage collection cycle starts from 'gc_mark_roots', which traverses
 * the possible roots and, for each root marked purple, calls gc_mark_grey to
 * perform a depth-first traversal of its subgraph.
 *
 * After all possible roots are traversed and marked, gc_scan_roots is
 * called, which invokes gc_scan(root->ref) on each root.
 *
 * gc_scan checks the colors of possible members.
 *
 * If a node is marked grey and its refcount > 0, gc_scan_black is called on
 * that node to scan its subgraph; otherwise (refcount == 0) the node is
 * marked white.
 *
 * A node MAY be added to the possible roots when ZEND_UNSET_VAR happens or
 * when zend_assign_to_variable is called, but only when a possible garbage
 * node is produced. gc_possible_root() is called to add such nodes to the
 * possible roots.
 *
 *
 * For objects, we call their get_gc handler (by default 'zend_std_get_gc')
 * to get the object properties to scan.
 *
 *
 * @see http://researcher.watson.ibm.com/researcher/files/us-bacon/Bacon01Concurrent.pdf
 */
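/* Illustrative example (userland PHP, not part of the engine): the kind of
 * structure this collector exists to reclaim. Plain reference counting can
 * never free a self-referential cycle, so the buffered root is picked up by
 * the cycle collector instead:
 *
 *   <?php
 *   $a = new stdClass();
 *   $a->self = $a;       // refcount(obj) == 2: variable + property
 *   unset($a);           // refcount drops to 1, object is now unreachable
 *   gc_collect_cycles(); // the cycle collector detects and frees it
 */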
  68. #include "zend.h"
  69. #include "zend_API.h"
  70. #ifndef GC_BENCH
  71. # define GC_BENCH 0
  72. #endif
  73. #ifndef ZEND_GC_DEBUG
  74. # define ZEND_GC_DEBUG 0
  75. #endif
  76. /* GC_INFO layout */
  77. #define GC_ADDRESS 0x0fffff
  78. #define GC_COLOR 0x300000
  79. #define GC_BLACK 0x000000 /* must be zero */
  80. #define GC_WHITE 0x100000
  81. #define GC_GREY 0x200000
  82. #define GC_PURPLE 0x300000
  83. /* Debug tracing */
  84. #if ZEND_GC_DEBUG > 1
  85. # define GC_TRACE(format, ...) fprintf(stderr, format "\n", ##__VA_ARGS__);
  86. # define GC_TRACE_REF(ref, format, ...) \
  87. do { \
  88. gc_trace_ref((zend_refcounted *) ref); \
  89. fprintf(stderr, format "\n", ##__VA_ARGS__); \
  90. } while (0)
  91. # define GC_TRACE_SET_COLOR(ref, color) \
  92. GC_TRACE_REF(ref, "->%s", gc_color_name(color))
  93. #else
  94. # define GC_TRACE_REF(ref, format, ...)
  95. # define GC_TRACE_SET_COLOR(ref, new_color)
  96. # define GC_TRACE(str)
  97. #endif
  98. /* GC_INFO access */
  99. #define GC_REF_ADDRESS(ref) \
  100. (((GC_TYPE_INFO(ref)) & (GC_ADDRESS << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
  101. #define GC_REF_COLOR(ref) \
  102. (((GC_TYPE_INFO(ref)) & (GC_COLOR << GC_INFO_SHIFT)) >> GC_INFO_SHIFT)
  103. #define GC_REF_CHECK_COLOR(ref, color) \
  104. ((GC_TYPE_INFO(ref) & (GC_COLOR << GC_INFO_SHIFT)) == ((color) << GC_INFO_SHIFT))
  105. #define GC_REF_SET_INFO(ref, info) do { \
  106. GC_TYPE_INFO(ref) = \
  107. (GC_TYPE_INFO(ref) & (GC_TYPE_MASK | GC_FLAGS_MASK)) | \
  108. ((info) << GC_INFO_SHIFT); \
  109. } while (0)
  110. #define GC_REF_SET_COLOR(ref, c) do { \
  111. GC_TRACE_SET_COLOR(ref, c); \
  112. GC_TYPE_INFO(ref) = \
  113. (GC_TYPE_INFO(ref) & ~(GC_COLOR << GC_INFO_SHIFT)) | \
  114. ((c) << GC_INFO_SHIFT); \
  115. } while (0)
  116. #define GC_REF_SET_BLACK(ref) do { \
  117. GC_TRACE_SET_COLOR(ref, GC_BLACK); \
  118. GC_TYPE_INFO(ref) &= ~(GC_COLOR << GC_INFO_SHIFT); \
  119. } while (0)
  120. #define GC_REF_SET_PURPLE(ref) do { \
  121. GC_TRACE_SET_COLOR(ref, GC_PURPLE); \
  122. GC_TYPE_INFO(ref) |= (GC_COLOR << GC_INFO_SHIFT); \
  123. } while (0)
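/* Worked example of the GC_INFO layout (a sketch; GC_INFO_SHIFT and the
 * type/flags masks come from zend_types.h): the GC_INFO field sits above the
 * GC_TYPE_MASK and GC_FLAGS_MASK bits of zend_refcounted's type_info. Its
 * low 20 bits (GC_ADDRESS) hold the (compressed) root-buffer index and the
 * next 2 bits (GC_COLOR) hold the color. For a node buffered at slot 5 and
 * marked purple:
 *
 *   GC_REF_SET_INFO(ref, 5 | GC_PURPLE);
 *   GC_REF_ADDRESS(ref);                  // == 5
 *   GC_REF_CHECK_COLOR(ref, GC_PURPLE);   // true
 *   GC_REF_SET_BLACK(ref);                // clears only the color bits
 *   GC_REF_ADDRESS(ref);                  // still 5
 */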
  124. /* bit stealing tags for gc_root_buffer.ref */
  125. #define GC_BITS 0x3
  126. #define GC_ROOT 0x0 /* possible root of circular garbage */
  127. #define GC_UNUSED 0x1 /* part of linked list of unused buffers */
  128. #define GC_GARBAGE 0x2 /* garbage to delete */
  129. #define GC_GET_PTR(ptr) \
  130. ((void*)(((uintptr_t)(ptr)) & ~GC_BITS))
  131. #define GC_IS_ROOT(ptr) \
  132. ((((uintptr_t)(ptr)) & GC_BITS) == GC_ROOT)
  133. #define GC_IS_UNUSED(ptr) \
  134. ((((uintptr_t)(ptr)) & GC_BITS) == GC_UNUSED)
  135. #define GC_IS_GARBAGE(ptr) \
  136. ((((uintptr_t)(ptr)) & GC_BITS) == GC_GARBAGE)
  137. #define GC_MAKE_GARBAGE(ptr) \
  138. ((void*)(((uintptr_t)(ptr)) | GC_GARBAGE))
  139. /* GC address conversion */
  140. #define GC_IDX2PTR(idx) (GC_G(buf) + (idx))
  141. #define GC_PTR2IDX(ptr) ((ptr) - GC_G(buf))
  142. #define GC_IDX2LIST(idx) ((void*)(uintptr_t)(((idx) * sizeof(void*)) | GC_UNUSED))
  143. #define GC_LIST2IDX(list) (((uint32_t)(uintptr_t)(list)) / sizeof(void*))
  144. /* GC buffers */
  145. #define GC_INVALID 0
  146. #define GC_FIRST_ROOT 1
  147. #define GC_DEFAULT_BUF_SIZE (16 * 1024)
  148. #define GC_BUF_GROW_STEP (128 * 1024)
  149. #define GC_MAX_UNCOMPRESSED (512 * 1024)
  150. #define GC_MAX_BUF_SIZE 0x40000000
  151. #define GC_THRESHOLD_DEFAULT 10000
  152. #define GC_THRESHOLD_STEP 10000
  153. #define GC_THRESHOLD_MAX 1000000000
  154. #define GC_THRESHOLD_TRIGGER 100
  155. /* GC flags */
  156. #define GC_HAS_DESTRUCTORS (1<<0)
  157. /* unused buffers */
  158. #define GC_HAS_UNUSED() \
  159. (GC_G(unused) != GC_INVALID)
  160. #define GC_FETCH_UNUSED() \
  161. gc_fetch_unused()
  162. #define GC_LINK_UNUSED(root) \
  163. gc_link_unused(root)
  164. #define GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD() \
  165. (GC_G(first_unused) < GC_G(gc_threshold))
  166. #define GC_HAS_NEXT_UNUSED() \
  167. (GC_G(first_unused) != GC_G(buf_size))
  168. #define GC_FETCH_NEXT_UNUSED() \
  169. gc_fetch_next_unused()
  170. ZEND_API int (*gc_collect_cycles)(void);
  171. typedef struct _gc_root_buffer {
  172. zend_refcounted *ref;
  173. } gc_root_buffer;
  174. typedef struct _zend_gc_globals {
  175. zend_bool gc_enabled;
  176. zend_bool gc_active; /* GC currently running, forbid nested GC */
  177. zend_bool gc_protected; /* GC protected, forbid root additions */
  178. zend_bool gc_full;
  179. gc_root_buffer *buf; /* preallocated arrays of buffers */
  180. uint32_t unused; /* linked list of unused buffers */
  181. uint32_t first_unused; /* first unused buffer */
  182. uint32_t gc_threshold; /* GC collection threshold */
  183. uint32_t buf_size; /* size of the GC buffer */
  184. uint32_t num_roots; /* number of roots in GC buffer */
  185. uint32_t gc_runs;
  186. uint32_t collected;
  187. #if GC_BENCH
  188. uint32_t root_buf_length;
  189. uint32_t root_buf_peak;
  190. uint32_t zval_possible_root;
  191. uint32_t zval_buffered;
  192. uint32_t zval_remove_from_buffer;
  193. uint32_t zval_marked_grey;
  194. #endif
  195. } zend_gc_globals;
  196. #ifdef ZTS
  197. static int gc_globals_id;
  198. #define GC_G(v) ZEND_TSRMG(gc_globals_id, zend_gc_globals *, v)
  199. #else
  200. #define GC_G(v) (gc_globals.v)
  201. static zend_gc_globals gc_globals;
  202. #endif
  203. #if GC_BENCH
  204. # define GC_BENCH_INC(counter) GC_G(counter)++
  205. # define GC_BENCH_DEC(counter) GC_G(counter)--
  206. # define GC_BENCH_PEAK(peak, counter) do { \
  207. if (GC_G(counter) > GC_G(peak)) { \
  208. GC_G(peak) = GC_G(counter); \
  209. } \
  210. } while (0)
  211. #else
  212. # define GC_BENCH_INC(counter)
  213. # define GC_BENCH_DEC(counter)
  214. # define GC_BENCH_PEAK(peak, counter)
  215. #endif
  216. #define GC_STACK_SEGMENT_SIZE (((4096 - ZEND_MM_OVERHEAD) / sizeof(void*)) - 2)
  217. typedef struct _gc_stack gc_stack;
  218. struct _gc_stack {
  219. gc_stack *prev;
  220. gc_stack *next;
  221. zend_refcounted *data[GC_STACK_SEGMENT_SIZE];
  222. };
  223. #define GC_STACK_DCL(init) \
  224. gc_stack *_stack = init; \
  225. size_t _top = 0;
  226. #define GC_STACK_PUSH(ref) \
  227. gc_stack_push(&_stack, &_top, ref);
  228. #define GC_STACK_POP() \
  229. gc_stack_pop(&_stack, &_top)
  230. static zend_never_inline gc_stack* gc_stack_next(gc_stack *stack)
  231. {
  232. if (UNEXPECTED(!stack->next)) {
  233. gc_stack *segment = emalloc(sizeof(gc_stack));
  234. segment->prev = stack;
  235. segment->next = NULL;
  236. stack->next = segment;
  237. }
  238. return stack->next;
  239. }
  240. static zend_always_inline void gc_stack_push(gc_stack **stack, size_t *top, zend_refcounted *ref)
  241. {
  242. if (UNEXPECTED(*top == GC_STACK_SEGMENT_SIZE)) {
  243. (*stack) = gc_stack_next(*stack);
  244. (*top) = 0;
  245. }
  246. (*stack)->data[(*top)++] = ref;
  247. }
  248. static zend_always_inline zend_refcounted* gc_stack_pop(gc_stack **stack, size_t *top)
  249. {
  250. if (UNEXPECTED((*top) == 0)) {
  251. if (!(*stack)->prev) {
  252. return NULL;
  253. } else {
  254. (*stack) = (*stack)->prev;
  255. (*top) = GC_STACK_SEGMENT_SIZE - 1;
  256. return (*stack)->data[GC_STACK_SEGMENT_SIZE - 1];
  257. }
  258. } else {
  259. return (*stack)->data[--(*top)];
  260. }
  261. }
  262. static void gc_stack_free(gc_stack *stack)
  263. {
  264. gc_stack *p = stack->next;
  265. while (p) {
  266. stack = p->next;
  267. efree(p);
  268. p = stack;
  269. }
  270. }
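/* Usage sketch for the segmented mark stack above (illustrative only; the
 * real users are the traversal functions further down, and 'some_ref' is a
 * placeholder). The first segment lives on the C stack; extra segments are
 * emalloc'd only once more than GC_STACK_SEGMENT_SIZE references are pending.
 *
 *   zend_refcounted *ref;
 *   gc_stack stack;
 *   stack.prev = NULL;
 *   stack.next = NULL;
 *   GC_STACK_DCL(&stack);            // declares the _stack/_top cursor locals
 *   GC_STACK_PUSH(some_ref);         // spills into a new segment when full
 *   while ((ref = GC_STACK_POP()) != NULL) {
 *       // ... process ref; POP returns NULL once the stack is empty
 *   }
 *   gc_stack_free(&stack);           // frees the heap segments only
 */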
  271. static zend_always_inline uint32_t gc_compress(uint32_t idx)
  272. {
  273. if (EXPECTED(idx < GC_MAX_UNCOMPRESSED)) {
  274. return idx;
  275. }
  276. return (idx % GC_MAX_UNCOMPRESSED) | GC_MAX_UNCOMPRESSED;
  277. }
  278. static zend_always_inline gc_root_buffer* gc_decompress(zend_refcounted *ref, uint32_t idx)
  279. {
  280. gc_root_buffer *root = GC_IDX2PTR(idx);
  281. if (EXPECTED(GC_GET_PTR(root->ref) == ref)) {
  282. return root;
  283. }
  284. while (1) {
  285. idx += GC_MAX_UNCOMPRESSED;
  286. ZEND_ASSERT(idx < GC_G(first_unused));
  287. root = GC_IDX2PTR(idx);
  288. if (GC_GET_PTR(root->ref) == ref) {
  289. return root;
  290. }
  291. }
  292. }
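/* Worked example for the index compression above (illustrative): GC_ADDRESS
 * offers only 20 bits, so buffer slots >= GC_MAX_UNCOMPRESSED (512 * 1024)
 * cannot be stored verbatim. gc_compress() folds such an index into the
 * range [512K, 1M), and gc_decompress() probes candidate slots 512K apart
 * until the stored ref pointer matches:
 *
 *   gc_compress(1000)    == 1000                           // stored as-is
 *   gc_compress(1100000) == (1100000 % 524288) | 524288    // == 575712
 *   // gc_decompress(ref, 575712) checks slot 575712, then
 *   // 575712 + 524288 == 1100000, and returns the matching slot.
 */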
  293. static zend_always_inline uint32_t gc_fetch_unused(void)
  294. {
  295. uint32_t idx;
  296. gc_root_buffer *root;
  297. ZEND_ASSERT(GC_HAS_UNUSED());
  298. idx = GC_G(unused);
  299. root = GC_IDX2PTR(idx);
  300. ZEND_ASSERT(GC_IS_UNUSED(root->ref));
  301. GC_G(unused) = GC_LIST2IDX(root->ref);
  302. return idx;
  303. }
  304. static zend_always_inline void gc_link_unused(gc_root_buffer *root)
  305. {
  306. root->ref = GC_IDX2LIST(GC_G(unused));
  307. GC_G(unused) = GC_PTR2IDX(root);
  308. }
  309. static zend_always_inline uint32_t gc_fetch_next_unused(void)
  310. {
  311. uint32_t idx;
  312. ZEND_ASSERT(GC_HAS_NEXT_UNUSED());
  313. idx = GC_G(first_unused);
  314. GC_G(first_unused) = GC_G(first_unused) + 1;
  315. return idx;
  316. }
  317. #if ZEND_GC_DEBUG > 1
  318. static const char *gc_color_name(uint32_t color) {
  319. switch (color) {
  320. case GC_BLACK: return "black";
  321. case GC_WHITE: return "white";
  322. case GC_GREY: return "grey";
  323. case GC_PURPLE: return "purple";
  324. default: return "unknown";
  325. }
  326. }
  327. static void gc_trace_ref(zend_refcounted *ref) {
  328. if (GC_TYPE(ref) == IS_OBJECT) {
  329. zend_object *obj = (zend_object *) ref;
  330. fprintf(stderr, "[%p] rc=%d addr=%d %s object(%s)#%d ",
  331. ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
  332. gc_color_name(GC_REF_COLOR(ref)),
  333. obj->ce->name->val, obj->handle);
  334. } else if (GC_TYPE(ref) == IS_ARRAY) {
  335. zend_array *arr = (zend_array *) ref;
  336. fprintf(stderr, "[%p] rc=%d addr=%d %s array(%d) ",
  337. ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
  338. gc_color_name(GC_REF_COLOR(ref)),
  339. zend_hash_num_elements(arr));
  340. } else {
  341. fprintf(stderr, "[%p] rc=%d addr=%d %s %s ",
  342. ref, GC_REFCOUNT(ref), GC_REF_ADDRESS(ref),
  343. gc_color_name(GC_REF_COLOR(ref)),
  344. zend_get_type_by_const(GC_TYPE(ref)));
  345. }
  346. }
  347. #endif
  348. static zend_always_inline void gc_remove_from_roots(gc_root_buffer *root)
  349. {
  350. GC_LINK_UNUSED(root);
  351. GC_G(num_roots)--;
  352. GC_BENCH_DEC(root_buf_length);
  353. }
  354. static void root_buffer_dtor(zend_gc_globals *gc_globals)
  355. {
  356. if (gc_globals->buf) {
  357. free(gc_globals->buf);
  358. gc_globals->buf = NULL;
  359. }
  360. }
  361. static void gc_globals_ctor_ex(zend_gc_globals *gc_globals)
  362. {
  363. gc_globals->gc_enabled = 0;
  364. gc_globals->gc_active = 0;
  365. gc_globals->gc_protected = 1;
  366. gc_globals->gc_full = 0;
  367. gc_globals->buf = NULL;
  368. gc_globals->unused = GC_INVALID;
  369. gc_globals->first_unused = GC_INVALID;
  370. gc_globals->gc_threshold = GC_INVALID;
  371. gc_globals->buf_size = GC_INVALID;
  372. gc_globals->num_roots = 0;
  373. gc_globals->gc_runs = 0;
  374. gc_globals->collected = 0;
  375. #if GC_BENCH
  376. gc_globals->root_buf_length = 0;
  377. gc_globals->root_buf_peak = 0;
  378. gc_globals->zval_possible_root = 0;
  379. gc_globals->zval_buffered = 0;
  380. gc_globals->zval_remove_from_buffer = 0;
  381. gc_globals->zval_marked_grey = 0;
  382. #endif
  383. }
  384. void gc_globals_ctor(void)
  385. {
  386. #ifdef ZTS
  387. ts_allocate_id(&gc_globals_id, sizeof(zend_gc_globals), (ts_allocate_ctor) gc_globals_ctor_ex, (ts_allocate_dtor) root_buffer_dtor);
  388. #else
  389. gc_globals_ctor_ex(&gc_globals);
  390. #endif
  391. }
  392. void gc_globals_dtor(void)
  393. {
  394. #ifndef ZTS
  395. root_buffer_dtor(&gc_globals);
  396. #endif
  397. }
  398. void gc_reset(void)
  399. {
  400. if (GC_G(buf)) {
  401. GC_G(gc_active) = 0;
  402. GC_G(gc_protected) = 0;
  403. GC_G(gc_full) = 0;
  404. GC_G(unused) = GC_INVALID;
  405. GC_G(first_unused) = GC_FIRST_ROOT;
  406. GC_G(num_roots) = 0;
  407. GC_G(gc_runs) = 0;
  408. GC_G(collected) = 0;
  409. #if GC_BENCH
  410. GC_G(root_buf_length) = 0;
  411. GC_G(root_buf_peak) = 0;
  412. GC_G(zval_possible_root) = 0;
  413. GC_G(zval_buffered) = 0;
  414. GC_G(zval_remove_from_buffer) = 0;
  415. GC_G(zval_marked_grey) = 0;
  416. #endif
  417. }
  418. }
  419. ZEND_API zend_bool gc_enable(zend_bool enable)
  420. {
  421. zend_bool old_enabled = GC_G(gc_enabled);
  422. GC_G(gc_enabled) = enable;
  423. if (enable && !old_enabled && GC_G(buf) == NULL) {
  424. GC_G(buf) = (gc_root_buffer*) pemalloc(sizeof(gc_root_buffer) * GC_DEFAULT_BUF_SIZE, 1);
  425. GC_G(buf)[0].ref = NULL;
  426. GC_G(buf_size) = GC_DEFAULT_BUF_SIZE;
  427. GC_G(gc_threshold) = GC_THRESHOLD_DEFAULT + GC_FIRST_ROOT;
  428. gc_reset();
  429. }
  430. return old_enabled;
  431. }
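/* Note (descriptive): the root buffer is allocated lazily on the first
 * gc_enable(1) call with GC_DEFAULT_BUF_SIZE (16384) slots. Slot 0 is
 * reserved (buf[0].ref == NULL) so that a GC_REF_ADDRESS() of 0, i.e.
 * GC_INVALID, always means "not buffered" and real roots start at
 * GC_FIRST_ROOT (1).
 */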
  432. ZEND_API zend_bool gc_enabled(void)
  433. {
  434. return GC_G(gc_enabled);
  435. }
  436. ZEND_API zend_bool gc_protect(zend_bool protect)
  437. {
  438. zend_bool old_protected = GC_G(gc_protected);
  439. GC_G(gc_protected) = protect;
  440. return old_protected;
  441. }
  442. ZEND_API zend_bool gc_protected(void)
  443. {
  444. return GC_G(gc_protected);
  445. }
  446. static void gc_grow_root_buffer(void)
  447. {
  448. size_t new_size;
  449. if (GC_G(buf_size) >= GC_MAX_BUF_SIZE) {
  450. if (!GC_G(gc_full)) {
  451. zend_error(E_WARNING, "GC buffer overflow (GC disabled)\n");
  452. GC_G(gc_active) = 1;
  453. GC_G(gc_protected) = 1;
  454. GC_G(gc_full) = 1;
  455. return;
  456. }
  457. }
  458. if (GC_G(buf_size) < GC_BUF_GROW_STEP) {
  459. new_size = GC_G(buf_size) * 2;
  460. } else {
  461. new_size = GC_G(buf_size) + GC_BUF_GROW_STEP;
  462. }
  463. if (new_size > GC_MAX_BUF_SIZE) {
  464. new_size = GC_MAX_BUF_SIZE;
  465. }
  466. GC_G(buf) = perealloc(GC_G(buf), sizeof(gc_root_buffer) * new_size, 1);
  467. GC_G(buf_size) = new_size;
  468. }
  469. static void gc_adjust_threshold(int count)
  470. {
  471. uint32_t new_threshold;
  472. /* TODO Very simple heuristic for dynamic GC buffer resizing:
  473. * If there are "too few" collections, increase the collection threshold
  474. * by a fixed step */
  475. if (count < GC_THRESHOLD_TRIGGER) {
  476. /* increase */
  477. if (GC_G(gc_threshold) < GC_THRESHOLD_MAX) {
  478. new_threshold = GC_G(gc_threshold) + GC_THRESHOLD_STEP;
  479. if (new_threshold > GC_THRESHOLD_MAX) {
  480. new_threshold = GC_THRESHOLD_MAX;
  481. }
  482. if (new_threshold > GC_G(buf_size)) {
  483. gc_grow_root_buffer();
  484. }
  485. if (new_threshold <= GC_G(buf_size)) {
  486. GC_G(gc_threshold) = new_threshold;
  487. }
  488. }
  489. } else if (GC_G(gc_threshold) > GC_THRESHOLD_DEFAULT) {
  490. new_threshold = GC_G(gc_threshold) - GC_THRESHOLD_STEP;
  491. if (new_threshold < GC_THRESHOLD_DEFAULT) {
  492. new_threshold = GC_THRESHOLD_DEFAULT;
  493. }
  494. GC_G(gc_threshold) = new_threshold;
  495. }
  496. }
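/* Worked example for the heuristic above (illustrative): the threshold starts
 * at GC_THRESHOLD_DEFAULT + GC_FIRST_ROOT == 10001 (see gc_enable()).
 *
 *   gc_adjust_threshold(3);    // < GC_THRESHOLD_TRIGGER: 10001 -> 20001,
 *                              // growing the root buffer first if needed
 *   gc_adjust_threshold(250);  // >= trigger: 20001 -> 10001, never dropping
 *                              // below GC_THRESHOLD_DEFAULT
 */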
  497. static zend_never_inline void ZEND_FASTCALL gc_possible_root_when_full(zend_refcounted *ref)
  498. {
  499. uint32_t idx;
  500. gc_root_buffer *newRoot;
  501. ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
  502. ZEND_ASSERT(GC_INFO(ref) == 0);
  503. if (GC_G(gc_enabled) && !GC_G(gc_active)) {
  504. GC_ADDREF(ref);
  505. gc_adjust_threshold(gc_collect_cycles());
  506. if (UNEXPECTED(GC_DELREF(ref) == 0)) {
  507. rc_dtor_func(ref);
  508. return;
  509. } else if (UNEXPECTED(GC_INFO(ref))) {
  510. return;
  511. }
  512. }
  513. if (GC_HAS_UNUSED()) {
  514. idx = GC_FETCH_UNUSED();
  515. } else if (EXPECTED(GC_HAS_NEXT_UNUSED())) {
  516. idx = GC_FETCH_NEXT_UNUSED();
  517. } else {
  518. gc_grow_root_buffer();
  519. if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
  520. return;
  521. }
  522. idx = GC_FETCH_NEXT_UNUSED();
  523. }
  524. newRoot = GC_IDX2PTR(idx);
  525. newRoot->ref = ref; /* GC_ROOT tag is 0 */
  526. GC_TRACE_SET_COLOR(ref, GC_PURPLE);
  527. idx = gc_compress(idx);
  528. GC_REF_SET_INFO(ref, idx | GC_PURPLE);
  529. GC_G(num_roots)++;
  530. GC_BENCH_INC(zval_buffered);
  531. GC_BENCH_INC(root_buf_length);
  532. GC_BENCH_PEAK(root_buf_peak, root_buf_length);
  533. }
  534. ZEND_API void ZEND_FASTCALL gc_possible_root(zend_refcounted *ref)
  535. {
  536. uint32_t idx;
  537. gc_root_buffer *newRoot;
  538. if (UNEXPECTED(GC_G(gc_protected))) {
  539. return;
  540. }
  541. GC_BENCH_INC(zval_possible_root);
  542. if (EXPECTED(GC_HAS_UNUSED())) {
  543. idx = GC_FETCH_UNUSED();
  544. } else if (EXPECTED(GC_HAS_NEXT_UNUSED_UNDER_THRESHOLD())) {
  545. idx = GC_FETCH_NEXT_UNUSED();
  546. } else {
  547. gc_possible_root_when_full(ref);
  548. return;
  549. }
  550. ZEND_ASSERT(GC_TYPE(ref) == IS_ARRAY || GC_TYPE(ref) == IS_OBJECT);
  551. ZEND_ASSERT(GC_INFO(ref) == 0);
  552. newRoot = GC_IDX2PTR(idx);
  553. newRoot->ref = ref; /* GC_ROOT tag is 0 */
  554. GC_TRACE_SET_COLOR(ref, GC_PURPLE);
  555. idx = gc_compress(idx);
  556. GC_REF_SET_INFO(ref, idx | GC_PURPLE);
  557. GC_G(num_roots)++;
  558. GC_BENCH_INC(zval_buffered);
  559. GC_BENCH_INC(root_buf_length);
  560. GC_BENCH_PEAK(root_buf_peak, root_buf_length);
  561. }
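/* Note on call sites (an assumption; the callers live in zend_gc.h and the
 * executor, not in this file): gc_possible_root() is expected to be invoked
 * when the refcount of an IS_OBJECT or IS_ARRAY node is decremented but
 * remains above zero, since only a still-referenced container can be keeping
 * a cycle alive. Buffering records the slot index in GC_REF_ADDRESS(ref) and
 * colors the node purple; gc_remove_from_buffer() below undoes this, for
 * instance when the node is destroyed before a collection runs.
 */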
  562. static zend_never_inline void ZEND_FASTCALL gc_remove_compressed(zend_refcounted *ref, uint32_t idx)
  563. {
  564. gc_root_buffer *root = gc_decompress(ref, idx);
  565. gc_remove_from_roots(root);
  566. }
  567. ZEND_API void ZEND_FASTCALL gc_remove_from_buffer(zend_refcounted *ref)
  568. {
  569. gc_root_buffer *root;
  570. uint32_t idx = GC_REF_ADDRESS(ref);
  571. GC_BENCH_INC(zval_remove_from_buffer);
  572. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  573. GC_TRACE_SET_COLOR(ref, GC_BLACK);
  574. }
  575. GC_REF_SET_INFO(ref, 0);
  576. /* Perform decompression only in case of large buffers */
  577. if (UNEXPECTED(GC_G(first_unused) >= GC_MAX_UNCOMPRESSED)) {
  578. gc_remove_compressed(ref, idx);
  579. return;
  580. }
  581. ZEND_ASSERT(idx);
  582. root = GC_IDX2PTR(idx);
  583. gc_remove_from_roots(root);
  584. }
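/* The traversal passes below implement the synchronous cycle collection
 * algorithm from the referenced Bacon & Rajan paper on top of the segmented
 * mark stack: gc_mark_grey() trial-deletes internal references by
 * decrementing refcounts along each edge, gc_scan() checks which nodes still
 * have a positive refcount (i.e. are externally reachable), and
 * gc_scan_black() restores the refcounts of those live subgraphs, leaving
 * only true garbage colored white.
 */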
  585. static void gc_scan_black(zend_refcounted *ref, gc_stack *stack)
  586. {
  587. HashTable *ht = NULL;
  588. Bucket *p, *end;
  589. zval *zv;
  590. GC_STACK_DCL(stack);
  591. tail_call:
  592. if (GC_TYPE(ref) == IS_OBJECT) {
  593. zend_object_get_gc_t get_gc;
  594. zend_object *obj = (zend_object*)ref;
  595. if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
  596. (get_gc = obj->handlers->get_gc) != NULL)) {
  597. int n;
  598. zval *zv, *end;
  599. zval tmp;
  600. ZVAL_OBJ(&tmp, obj);
  601. ht = get_gc(&tmp, &zv, &n);
  602. end = zv + n;
  603. if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
  604. ht = NULL;
  605. if (!n) goto next;
  606. while (!Z_REFCOUNTED_P(--end)) {
  607. if (zv == end) goto next;
  608. }
  609. } else {
  610. GC_REF_SET_BLACK(ht);
  611. }
  612. while (zv != end) {
  613. if (Z_REFCOUNTED_P(zv)) {
  614. ref = Z_COUNTED_P(zv);
  615. GC_ADDREF(ref);
  616. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  617. GC_REF_SET_BLACK(ref);
  618. GC_STACK_PUSH(ref);
  619. }
  620. }
  621. zv++;
  622. }
  623. if (EXPECTED(!ht)) {
  624. ref = Z_COUNTED_P(zv);
  625. GC_ADDREF(ref);
  626. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  627. GC_REF_SET_BLACK(ref);
  628. goto tail_call;
  629. }
  630. goto next;
  631. }
  632. } else {
  633. goto next;
  634. }
  635. } else if (GC_TYPE(ref) == IS_ARRAY) {
  636. if ((zend_array*)ref != &EG(symbol_table)) {
  637. ht = (zend_array*)ref;
  638. } else {
  639. goto next;
  640. }
  641. } else if (GC_TYPE(ref) == IS_REFERENCE) {
  642. if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
  643. ref = Z_COUNTED(((zend_reference*)ref)->val);
  644. GC_ADDREF(ref);
  645. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  646. GC_REF_SET_BLACK(ref);
  647. goto tail_call;
  648. }
  649. }
  650. goto next;
  651. } else {
  652. goto next;
  653. }
  654. if (!ht->nNumUsed) goto next;
  655. p = ht->arData;
  656. end = p + ht->nNumUsed;
  657. while (1) {
  658. end--;
  659. zv = &end->val;
  660. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  661. zv = Z_INDIRECT_P(zv);
  662. }
  663. if (Z_REFCOUNTED_P(zv)) {
  664. break;
  665. }
  666. if (p == end) goto next;
  667. }
  668. while (p != end) {
  669. zv = &p->val;
  670. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  671. zv = Z_INDIRECT_P(zv);
  672. }
  673. if (Z_REFCOUNTED_P(zv)) {
  674. ref = Z_COUNTED_P(zv);
  675. GC_ADDREF(ref);
  676. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  677. GC_REF_SET_BLACK(ref);
  678. GC_STACK_PUSH(ref);
  679. }
  680. }
  681. p++;
  682. }
  683. zv = &p->val;
  684. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  685. zv = Z_INDIRECT_P(zv);
  686. }
  687. ref = Z_COUNTED_P(zv);
  688. GC_ADDREF(ref);
  689. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  690. GC_REF_SET_BLACK(ref);
  691. goto tail_call;
  692. }
  693. next:
  694. ref = GC_STACK_POP();
  695. if (ref) {
  696. goto tail_call;
  697. }
  698. }
  699. static void gc_mark_grey(zend_refcounted *ref, gc_stack *stack)
  700. {
  701. HashTable *ht = NULL;
  702. Bucket *p, *end;
  703. zval *zv;
  704. GC_STACK_DCL(stack);
  705. do {
  706. GC_BENCH_INC(zval_marked_grey);
  707. if (GC_TYPE(ref) == IS_OBJECT) {
  708. zend_object_get_gc_t get_gc;
  709. zend_object *obj = (zend_object*)ref;
  710. if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
  711. (get_gc = obj->handlers->get_gc) != NULL)) {
  712. int n;
  713. zval *zv, *end;
  714. zval tmp;
  715. ZVAL_OBJ(&tmp, obj);
  716. ht = get_gc(&tmp, &zv, &n);
  717. end = zv + n;
  718. if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_GREY))) {
  719. ht = NULL;
  720. if (!n) goto next;
  721. while (!Z_REFCOUNTED_P(--end)) {
  722. if (zv == end) goto next;
  723. }
  724. } else {
  725. GC_REF_SET_COLOR(ht, GC_GREY);
  726. }
  727. while (zv != end) {
  728. if (Z_REFCOUNTED_P(zv)) {
  729. ref = Z_COUNTED_P(zv);
  730. ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
  731. GC_DELREF(ref);
  732. if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  733. GC_REF_SET_COLOR(ref, GC_GREY);
  734. GC_STACK_PUSH(ref);
  735. }
  736. }
  737. zv++;
  738. }
  739. if (EXPECTED(!ht)) {
  740. ref = Z_COUNTED_P(zv);
  741. ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
  742. GC_DELREF(ref);
  743. if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  744. GC_REF_SET_COLOR(ref, GC_GREY);
  745. continue;
  746. }
  747. goto next;
  748. }
  749. } else {
  750. goto next;
  751. }
  752. } else if (GC_TYPE(ref) == IS_ARRAY) {
  753. if (((zend_array*)ref) == &EG(symbol_table)) {
  754. GC_REF_SET_BLACK(ref);
  755. goto next;
  756. } else {
  757. ht = (zend_array*)ref;
  758. }
  759. } else if (GC_TYPE(ref) == IS_REFERENCE) {
  760. if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
  761. ref = Z_COUNTED(((zend_reference*)ref)->val);
  762. ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
  763. GC_DELREF(ref);
  764. if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  765. GC_REF_SET_COLOR(ref, GC_GREY);
  766. continue;
  767. }
  768. }
  769. goto next;
  770. } else {
  771. goto next;
  772. }
  773. if (!ht->nNumUsed) goto next;
  774. p = ht->arData;
  775. end = p + ht->nNumUsed;
  776. while (1) {
  777. end--;
  778. zv = &end->val;
  779. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  780. zv = Z_INDIRECT_P(zv);
  781. }
  782. if (Z_REFCOUNTED_P(zv)) {
  783. break;
  784. }
  785. if (p == end) goto next;
  786. }
  787. while (p != end) {
  788. zv = &p->val;
  789. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  790. zv = Z_INDIRECT_P(zv);
  791. }
  792. if (Z_REFCOUNTED_P(zv)) {
  793. ref = Z_COUNTED_P(zv);
  794. ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
  795. GC_DELREF(ref);
  796. if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  797. GC_REF_SET_COLOR(ref, GC_GREY);
  798. GC_STACK_PUSH(ref);
  799. }
  800. }
  801. p++;
  802. }
  803. zv = &p->val;
  804. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  805. zv = Z_INDIRECT_P(zv);
  806. }
  807. ref = Z_COUNTED_P(zv);
  808. ZEND_ASSERT(GC_REFCOUNT(ref) > 0);
  809. GC_DELREF(ref);
  810. if (!GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  811. GC_REF_SET_COLOR(ref, GC_GREY);
  812. continue;
  813. }
  814. next:
  815. ref = GC_STACK_POP();
  816. } while (ref);
  817. }
  818. /* Two-Finger compaction algorithm */
  819. static void gc_compact(void)
  820. {
  821. if (GC_G(num_roots) + GC_FIRST_ROOT != GC_G(first_unused)) {
  822. if (GC_G(num_roots)) {
  823. gc_root_buffer *free = GC_IDX2PTR(GC_FIRST_ROOT);
  824. gc_root_buffer *scan = GC_IDX2PTR(GC_G(first_unused) - 1);
  825. gc_root_buffer *end = GC_IDX2PTR(GC_G(num_roots));
  826. uint32_t idx;
  827. zend_refcounted *p;
  828. while (free < scan) {
  829. while (!GC_IS_UNUSED(free->ref)) {
  830. free++;
  831. }
  832. while (GC_IS_UNUSED(scan->ref)) {
  833. scan--;
  834. }
  835. if (scan > free) {
  836. p = scan->ref;
  837. free->ref = p;
  838. p = GC_GET_PTR(p);
  839. idx = gc_compress(GC_PTR2IDX(free));
  840. GC_REF_SET_INFO(p, idx | GC_REF_COLOR(p));
  841. free++;
  842. scan--;
  843. if (scan <= end) {
  844. break;
  845. }
  846. }
  847. }
  848. }
  849. GC_G(unused) = GC_INVALID;
  850. GC_G(first_unused) = GC_G(num_roots) + GC_FIRST_ROOT;
  851. }
  852. }
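/* Worked example of the two-finger compaction above (illustrative). With
 * GC_FIRST_ROOT == 1, num_roots == 3, first_unused == 7, and the live roots
 * 'A'..'C' scattered like this ('-' marks an unused slot):
 *
 *   idx:  1  2  3  4  5  6
 *         A  -  B  -  -  C
 *
 * 'free' walks up from idx 1 over used slots and 'scan' walks down from
 * idx 6 over unused ones; whenever they identify a (free, used) pair, the
 * root is moved down and its GC_REF_ADDRESS is rewritten, giving:
 *
 *   idx:  1  2  3  4  5  6
 *         A  C  B  -  -  -      (first_unused becomes num_roots + 1 == 4)
 */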
  853. static void gc_mark_roots(gc_stack *stack)
  854. {
  855. gc_root_buffer *current, *last;
  856. gc_compact();
  857. current = GC_IDX2PTR(GC_FIRST_ROOT);
  858. last = GC_IDX2PTR(GC_G(first_unused));
  859. while (current != last) {
  860. if (GC_IS_ROOT(current->ref)) {
  861. if (GC_REF_CHECK_COLOR(current->ref, GC_PURPLE)) {
  862. GC_REF_SET_COLOR(current->ref, GC_GREY);
  863. gc_mark_grey(current->ref, stack);
  864. }
  865. }
  866. current++;
  867. }
  868. }
  869. static void gc_scan(zend_refcounted *ref, gc_stack *stack)
  870. {
  871. HashTable *ht = NULL;
  872. Bucket *p, *end;
  873. zval *zv;
  874. GC_STACK_DCL(stack);
  875. tail_call:
  876. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  877. if (GC_REFCOUNT(ref) > 0) {
  878. if (!GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  879. GC_REF_SET_BLACK(ref);
  880. if (UNEXPECTED(!_stack->next)) {
  881. gc_stack_next(_stack);
  882. }
  883. /* Split stack and reuse the tail */
  884. _stack->next->prev = NULL;
  885. gc_scan_black(ref, _stack->next);
  886. _stack->next->prev = _stack;
  887. }
  888. } else {
  889. if (GC_TYPE(ref) == IS_OBJECT) {
  890. zend_object_get_gc_t get_gc;
  891. zend_object *obj = (zend_object*)ref;
  892. if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
  893. (get_gc = obj->handlers->get_gc) != NULL)) {
  894. int n;
  895. zval *zv, *end;
  896. zval tmp;
  897. ZVAL_OBJ(&tmp, obj);
  898. ht = get_gc(&tmp, &zv, &n);
  899. end = zv + n;
  900. if (EXPECTED(!ht) || UNEXPECTED(!GC_REF_CHECK_COLOR(ht, GC_GREY))) {
  901. ht = NULL;
  902. if (!n) goto next;
  903. while (!Z_REFCOUNTED_P(--end)) {
  904. if (zv == end) goto next;
  905. }
  906. } else {
  907. GC_REF_SET_COLOR(ht, GC_WHITE);
  908. }
  909. while (zv != end) {
  910. if (Z_REFCOUNTED_P(zv)) {
  911. ref = Z_COUNTED_P(zv);
  912. if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  913. GC_REF_SET_COLOR(ref, GC_WHITE);
  914. GC_STACK_PUSH(ref);
  915. }
  916. }
  917. zv++;
  918. }
  919. if (EXPECTED(!ht)) {
  920. ref = Z_COUNTED_P(zv);
  921. if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  922. GC_REF_SET_COLOR(ref, GC_WHITE);
  923. goto tail_call;
  924. }
  925. goto next;
  926. }
  927. } else {
  928. goto next;
  929. }
  930. } else if (GC_TYPE(ref) == IS_ARRAY) {
  931. if ((zend_array*)ref == &EG(symbol_table)) {
  932. GC_REF_SET_BLACK(ref);
  933. goto next;
  934. } else {
  935. ht = (zend_array*)ref;
  936. }
  937. } else if (GC_TYPE(ref) == IS_REFERENCE) {
  938. if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
  939. ref = Z_COUNTED(((zend_reference*)ref)->val);
  940. if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  941. GC_REF_SET_COLOR(ref, GC_WHITE);
  942. goto tail_call;
  943. }
  944. }
  945. goto next;
  946. } else {
  947. goto next;
  948. }
  949. if (!ht->nNumUsed) goto next;
  950. p = ht->arData;
  951. end = p + ht->nNumUsed;
  952. while (1) {
  953. end--;
  954. zv = &end->val;
  955. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  956. zv = Z_INDIRECT_P(zv);
  957. }
  958. if (Z_REFCOUNTED_P(zv)) {
  959. break;
  960. }
  961. if (p == end) goto next;
  962. }
  963. while (p != end) {
  964. zv = &p->val;
  965. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  966. zv = Z_INDIRECT_P(zv);
  967. }
  968. if (Z_REFCOUNTED_P(zv)) {
  969. ref = Z_COUNTED_P(zv);
  970. if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  971. GC_REF_SET_COLOR(ref, GC_WHITE);
  972. GC_STACK_PUSH(ref);
  973. }
  974. }
  975. p++;
  976. }
  977. zv = &p->val;
  978. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  979. zv = Z_INDIRECT_P(zv);
  980. }
  981. ref = Z_COUNTED_P(zv);
  982. if (GC_REF_CHECK_COLOR(ref, GC_GREY)) {
  983. GC_REF_SET_COLOR(ref, GC_WHITE);
  984. goto tail_call;
  985. }
  986. }
  987. }
  988. next:
  989. ref = GC_STACK_POP();
  990. if (ref) {
  991. goto tail_call;
  992. }
  993. }
  994. static void gc_scan_roots(gc_stack *stack)
  995. {
  996. gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
  997. gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
  998. while (current != last) {
  999. if (GC_IS_ROOT(current->ref)) {
  1000. if (GC_REF_CHECK_COLOR(current->ref, GC_GREY)) {
  1001. GC_REF_SET_COLOR(current->ref, GC_WHITE);
  1002. gc_scan(current->ref, stack);
  1003. }
  1004. }
  1005. current++;
  1006. }
  1007. }
  1008. static void gc_add_garbage(zend_refcounted *ref)
  1009. {
  1010. uint32_t idx;
  1011. gc_root_buffer *buf;
  1012. if (GC_HAS_UNUSED()) {
  1013. idx = GC_FETCH_UNUSED();
  1014. } else if (GC_HAS_NEXT_UNUSED()) {
  1015. idx = GC_FETCH_NEXT_UNUSED();
  1016. } else {
  1017. gc_grow_root_buffer();
  1018. if (UNEXPECTED(!GC_HAS_NEXT_UNUSED())) {
  1019. return;
  1020. }
  1021. idx = GC_FETCH_NEXT_UNUSED();
  1022. }
  1023. buf = GC_IDX2PTR(idx);
  1024. buf->ref = GC_MAKE_GARBAGE(ref);
  1025. idx = gc_compress(idx);
  1026. GC_REF_SET_INFO(ref, idx | GC_BLACK);
  1027. GC_G(num_roots)++;
  1028. }
  1029. static int gc_collect_white(zend_refcounted *ref, uint32_t *flags, gc_stack *stack)
  1030. {
  1031. int count = 0;
  1032. HashTable *ht = NULL;
  1033. Bucket *p, *end;
  1034. zval *zv;
  1035. GC_STACK_DCL(stack);
  1036. do {
  1037. /* don't count references for compatibility ??? */
  1038. if (GC_TYPE(ref) != IS_REFERENCE) {
  1039. count++;
  1040. }
  1041. if (GC_TYPE(ref) == IS_OBJECT) {
  1042. zend_object_get_gc_t get_gc;
  1043. zend_object *obj = (zend_object*)ref;
  1044. if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
  1045. (get_gc = obj->handlers->get_gc) != NULL)) {
  1046. int n;
  1047. zval *zv, *end;
  1048. zval tmp;
  1049. /* optimization: color is GC_BLACK (0) */
  1050. if (!GC_INFO(ref)) {
  1051. gc_add_garbage(ref);
  1052. }
  1053. if (obj->handlers->dtor_obj &&
  1054. ((obj->handlers->dtor_obj != zend_objects_destroy_object) ||
  1055. (obj->ce->destructor != NULL))) {
  1056. *flags |= GC_HAS_DESTRUCTORS;
  1057. }
  1058. ZVAL_OBJ(&tmp, obj);
  1059. ht = get_gc(&tmp, &zv, &n);
  1060. end = zv + n;
  1061. if (EXPECTED(!ht) || UNEXPECTED(GC_REF_CHECK_COLOR(ht, GC_BLACK))) {
  1062. ht = NULL;
  1063. if (!n) goto next;
  1064. while (!Z_REFCOUNTED_P(--end)) {
  1065. /* count non-refcounted for compatibility ??? */
  1066. if (Z_TYPE_P(zv) != IS_UNDEF) {
  1067. count++;
  1068. }
  1069. if (zv == end) goto next;
  1070. }
  1071. } else {
  1072. GC_REF_SET_BLACK(ht);
  1073. }
  1074. while (zv != end) {
  1075. if (Z_REFCOUNTED_P(zv)) {
  1076. ref = Z_COUNTED_P(zv);
  1077. GC_ADDREF(ref);
  1078. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1079. GC_REF_SET_BLACK(ref);
  1080. GC_STACK_PUSH(ref);
  1081. }
  1082. /* count non-refcounted for compatibility ??? */
  1083. } else if (Z_TYPE_P(zv) != IS_UNDEF) {
  1084. count++;
  1085. }
  1086. zv++;
  1087. }
  1088. if (EXPECTED(!ht)) {
  1089. ref = Z_COUNTED_P(zv);
  1090. GC_ADDREF(ref);
  1091. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1092. GC_REF_SET_BLACK(ref);
  1093. continue;
  1094. }
  1095. goto next;
  1096. }
  1097. } else {
  1098. goto next;
  1099. }
  1100. } else if (GC_TYPE(ref) == IS_ARRAY) {
  1101. /* optimization: color is GC_BLACK (0) */
  1102. if (!GC_INFO(ref)) {
  1103. gc_add_garbage(ref);
  1104. }
  1105. ht = (zend_array*)ref;
  1106. } else if (GC_TYPE(ref) == IS_REFERENCE) {
  1107. if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
  1108. ref = Z_COUNTED(((zend_reference*)ref)->val);
  1109. GC_ADDREF(ref);
  1110. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1111. GC_REF_SET_BLACK(ref);
  1112. continue;
  1113. }
  1114. }
  1115. goto next;
  1116. } else {
  1117. goto next;
  1118. }
  1119. if (!ht->nNumUsed) goto next;
  1120. p = ht->arData;
  1121. end = p + ht->nNumUsed;
  1122. while (1) {
  1123. end--;
  1124. zv = &end->val;
  1125. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1126. zv = Z_INDIRECT_P(zv);
  1127. }
  1128. if (Z_REFCOUNTED_P(zv)) {
  1129. break;
  1130. }
  1131. /* count non-refcounted for compatibility ??? */
  1132. if (Z_TYPE_P(zv) != IS_UNDEF) {
  1133. count++;
  1134. }
  1135. if (p == end) goto next;
  1136. }
  1137. while (p != end) {
  1138. zv = &p->val;
  1139. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1140. zv = Z_INDIRECT_P(zv);
  1141. }
  1142. if (Z_REFCOUNTED_P(zv)) {
  1143. ref = Z_COUNTED_P(zv);
  1144. GC_ADDREF(ref);
  1145. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1146. GC_REF_SET_BLACK(ref);
  1147. GC_STACK_PUSH(ref);
  1148. }
  1149. /* count non-refcounted for compatibility ??? */
  1150. } else if (Z_TYPE_P(zv) != IS_UNDEF) {
  1151. count++;
  1152. }
  1153. p++;
  1154. }
  1155. zv = &p->val;
  1156. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1157. zv = Z_INDIRECT_P(zv);
  1158. }
  1159. ref = Z_COUNTED_P(zv);
  1160. GC_ADDREF(ref);
  1161. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1162. GC_REF_SET_BLACK(ref);
  1163. continue;
  1164. }
  1165. next:
  1166. ref = GC_STACK_POP();
  1167. } while (ref);
  1168. return count;
  1169. }
  1170. static int gc_collect_roots(uint32_t *flags, gc_stack *stack)
  1171. {
  1172. uint32_t idx, end;
  1173. zend_refcounted *ref;
  1174. int count = 0;
  1175. gc_root_buffer *current = GC_IDX2PTR(GC_FIRST_ROOT);
  1176. gc_root_buffer *last = GC_IDX2PTR(GC_G(first_unused));
  1177. /* remove non-garbage from the list */
  1178. while (current != last) {
  1179. if (GC_IS_ROOT(current->ref)) {
  1180. if (GC_REF_CHECK_COLOR(current->ref, GC_BLACK)) {
  1181. GC_REF_SET_INFO(current->ref, 0); /* reset GC_ADDRESS() and keep GC_BLACK */
  1182. gc_remove_from_roots(current);
  1183. }
  1184. }
  1185. current++;
  1186. }
  1187. gc_compact();
  1188. /* Root buffer might be reallocated during gc_collect_white,
  1189. * make sure to reload pointers. */
  1190. idx = GC_FIRST_ROOT;
  1191. end = GC_G(first_unused);
  1192. while (idx != end) {
  1193. current = GC_IDX2PTR(idx);
  1194. ref = current->ref;
  1195. ZEND_ASSERT(GC_IS_ROOT(ref));
  1196. current->ref = GC_MAKE_GARBAGE(ref);
  1197. if (GC_REF_CHECK_COLOR(ref, GC_WHITE)) {
  1198. GC_REF_SET_BLACK(ref);
  1199. count += gc_collect_white(ref, flags, stack);
  1200. }
  1201. idx++;
  1202. }
  1203. return count;
  1204. }
  1205. static void gc_remove_nested_data_from_buffer(zend_refcounted *ref, gc_root_buffer *root)
  1206. {
  1207. HashTable *ht = NULL;
  1208. Bucket *p, *end;
  1209. zval *zv;
  1210. tail_call:
  1211. do {
  1212. if (root) {
  1213. GC_TRACE_REF(ref, "removing from buffer");
  1214. gc_remove_from_roots(root);
  1215. GC_REF_SET_INFO(ref, 0);
  1216. root = NULL;
  1217. } else if (GC_REF_ADDRESS(ref) != 0
  1218. && GC_REF_CHECK_COLOR(ref, GC_BLACK)) {
  1219. GC_TRACE_REF(ref, "removing from buffer");
  1220. GC_REMOVE_FROM_BUFFER(ref);
  1221. } else if (GC_TYPE(ref) == IS_REFERENCE) {
  1222. if (Z_REFCOUNTED(((zend_reference*)ref)->val)) {
  1223. ref = Z_COUNTED(((zend_reference*)ref)->val);
  1224. goto tail_call;
  1225. }
  1226. return;
  1227. } else {
  1228. return;
  1229. }
  1230. if (GC_TYPE(ref) == IS_OBJECT) {
  1231. zend_object_get_gc_t get_gc;
  1232. zend_object *obj = (zend_object*)ref;
  1233. if (EXPECTED(!(OBJ_FLAGS(ref) & IS_OBJ_FREE_CALLED) &&
  1234. (get_gc = obj->handlers->get_gc) != NULL)) {
  1235. int n;
  1236. zval *zv, *end;
  1237. zval tmp;
  1238. ZVAL_OBJ(&tmp, obj);
  1239. ht = get_gc(&tmp, &zv, &n);
  1240. end = zv + n;
  1241. if (EXPECTED(!ht)) {
  1242. if (!n) return;
  1243. while (!Z_REFCOUNTED_P(--end)) {
  1244. if (zv == end) return;
  1245. }
  1246. }
  1247. while (zv != end) {
  1248. if (Z_REFCOUNTED_P(zv)) {
  1249. ref = Z_COUNTED_P(zv);
  1250. gc_remove_nested_data_from_buffer(ref, NULL);
  1251. }
  1252. zv++;
  1253. }
  1254. if (EXPECTED(!ht)) {
  1255. ref = Z_COUNTED_P(zv);
  1256. goto tail_call;
  1257. }
  1258. if (GC_REF_ADDRESS(ht) != 0 && GC_REF_CHECK_COLOR(ht, GC_BLACK)) {
  1259. GC_TRACE_REF(ht, "removing from buffer");
  1260. GC_REMOVE_FROM_BUFFER(ht);
  1261. }
  1262. } else {
  1263. return;
  1264. }
  1265. } else if (GC_TYPE(ref) == IS_ARRAY) {
  1266. ht = (zend_array*)ref;
  1267. } else {
  1268. return;
  1269. }
  1270. if (!ht->nNumUsed) return;
  1271. p = ht->arData;
  1272. end = p + ht->nNumUsed;
  1273. while (1) {
  1274. end--;
  1275. zv = &end->val;
  1276. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1277. zv = Z_INDIRECT_P(zv);
  1278. }
  1279. if (Z_REFCOUNTED_P(zv)) {
  1280. break;
  1281. }
  1282. if (p == end) return;
  1283. }
  1284. while (p != end) {
  1285. zv = &p->val;
  1286. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1287. zv = Z_INDIRECT_P(zv);
  1288. }
  1289. if (Z_REFCOUNTED_P(zv)) {
  1290. ref = Z_COUNTED_P(zv);
  1291. gc_remove_nested_data_from_buffer(ref, NULL);
  1292. }
  1293. p++;
  1294. }
  1295. zv = &p->val;
  1296. if (Z_TYPE_P(zv) == IS_INDIRECT) {
  1297. zv = Z_INDIRECT_P(zv);
  1298. }
  1299. ref = Z_COUNTED_P(zv);
  1300. goto tail_call;
  1301. } while (0);
  1302. }
  1303. ZEND_API int zend_gc_collect_cycles(void)
  1304. {
  1305. int count = 0;
  1306. if (GC_G(num_roots)) {
  1307. gc_root_buffer *current, *last;
  1308. zend_refcounted *p;
  1309. uint32_t gc_flags = 0;
  1310. uint32_t idx, end;
  1311. gc_stack stack;
  1312. stack.prev = NULL;
  1313. stack.next = NULL;
  1314. if (GC_G(gc_active)) {
  1315. return 0;
  1316. }
  1317. GC_TRACE("Collecting cycles");
  1318. GC_G(gc_runs)++;
  1319. GC_G(gc_active) = 1;
  1320. GC_TRACE("Marking roots");
  1321. gc_mark_roots(&stack);
  1322. GC_TRACE("Scanning roots");
  1323. gc_scan_roots(&stack);
  1324. GC_TRACE("Collecting roots");
  1325. count = gc_collect_roots(&gc_flags, &stack);
  1326. gc_stack_free(&stack);
  1327. if (!GC_G(num_roots)) {
  1328. /* nothing to free */
  1329. GC_TRACE("Nothing to free");
  1330. GC_G(gc_active) = 0;
  1331. return 0;
  1332. }
  1333. end = GC_G(first_unused);
  1334. if (gc_flags & GC_HAS_DESTRUCTORS) {
  1335. uint32_t *refcounts;
  1336. GC_TRACE("Calling destructors");
  1337. // TODO: maybe use emalloc() ???
  1338. refcounts = pemalloc(sizeof(uint32_t) * end, 1);
  1339. /* Remember reference counters before calling destructors */
  1340. idx = GC_FIRST_ROOT;
  1341. current = GC_IDX2PTR(GC_FIRST_ROOT);
  1342. while (idx != end) {
  1343. if (GC_IS_GARBAGE(current->ref)) {
  1344. p = GC_GET_PTR(current->ref);
  1345. refcounts[idx] = GC_REFCOUNT(p);
  1346. }
  1347. current++;
  1348. idx++;
  1349. }
  1350. /* Call destructors
  1351. *
  1352. * The root buffer might be reallocated during destructors calls,
  1353. * make sure to reload pointers as necessary. */
  1354. idx = GC_FIRST_ROOT;
  1355. while (idx != end) {
  1356. current = GC_IDX2PTR(idx);
  1357. if (GC_IS_GARBAGE(current->ref)) {
  1358. p = GC_GET_PTR(current->ref);
  1359. if (GC_TYPE(p) == IS_OBJECT
  1360. && !(OBJ_FLAGS(p) & IS_OBJ_DESTRUCTOR_CALLED)) {
  1361. zend_object *obj = (zend_object*)p;
  1362. GC_TRACE_REF(obj, "calling destructor");
  1363. GC_ADD_FLAGS(obj, IS_OBJ_DESTRUCTOR_CALLED);
  1364. if (obj->handlers->dtor_obj
  1365. && (obj->handlers->dtor_obj != zend_objects_destroy_object
  1366. || obj->ce->destructor)) {
  1367. GC_ADDREF(obj);
  1368. obj->handlers->dtor_obj(obj);
  1369. GC_DELREF(obj);
  1370. }
  1371. }
  1372. }
  1373. idx++;
  1374. }
  1375. /* Remove values captured in destructors */
  1376. idx = GC_FIRST_ROOT;
  1377. current = GC_IDX2PTR(GC_FIRST_ROOT);
  1378. while (idx != end) {
  1379. if (GC_IS_GARBAGE(current->ref)) {
  1380. p = GC_GET_PTR(current->ref);
  1381. if (GC_REFCOUNT(p) > refcounts[idx]) {
  1382. gc_remove_nested_data_from_buffer(p, current);
  1383. }
  1384. }
  1385. current++;
  1386. idx++;
  1387. }
  1388. pefree(refcounts, 1);
  1389. if (GC_G(gc_protected)) {
  1390. /* something went wrong */
  1391. return 0;
  1392. }
  1393. }
  1394. /* Destroy zvals */
  1395. GC_TRACE("Destroying zvals");
  1396. GC_G(gc_protected) = 1;
  1397. current = GC_IDX2PTR(GC_FIRST_ROOT);
  1398. last = GC_IDX2PTR(GC_G(first_unused));
  1399. while (current != last) {
  1400. if (GC_IS_GARBAGE(current->ref)) {
  1401. p = GC_GET_PTR(current->ref);
  1402. GC_TRACE_REF(p, "destroying");
  1403. if (GC_TYPE(p) == IS_OBJECT) {
  1404. zend_object *obj = (zend_object*)p;
  1405. EG(objects_store).object_buckets[obj->handle] = SET_OBJ_INVALID(obj);
  1406. GC_TYPE_INFO(obj) = IS_NULL |
  1407. (GC_TYPE_INFO(obj) & ~GC_TYPE_MASK);
  1408. if (!(OBJ_FLAGS(obj) & IS_OBJ_FREE_CALLED)) {
  1409. GC_ADD_FLAGS(obj, IS_OBJ_FREE_CALLED);
  1410. if (obj->handlers->free_obj) {
  1411. GC_ADDREF(obj);
  1412. obj->handlers->free_obj(obj);
  1413. GC_DELREF(obj);
  1414. }
  1415. }
  1416. ZEND_OBJECTS_STORE_ADD_TO_FREE_LIST(obj->handle);
  1417. current->ref = GC_MAKE_GARBAGE(((char*)obj) - obj->handlers->offset);
  1418. } else if (GC_TYPE(p) == IS_ARRAY) {
  1419. zend_array *arr = (zend_array*)p;
  1420. GC_TYPE_INFO(arr) = IS_NULL |
  1421. (GC_TYPE_INFO(arr) & ~GC_TYPE_MASK);
  1422. /* GC may destroy arrays with rc>1. This is valid and safe. */
  1423. HT_ALLOW_COW_VIOLATION(arr);
  1424. zend_hash_destroy(arr);
  1425. }
  1426. }
  1427. current++;
  1428. }
  1429. /* Free objects */
  1430. current = GC_IDX2PTR(GC_FIRST_ROOT);
  1431. while (current != last) {
  1432. if (GC_IS_GARBAGE(current->ref)) {
  1433. p = GC_GET_PTR(current->ref);
  1434. GC_LINK_UNUSED(current);
  1435. GC_G(num_roots)--;
  1436. efree(p);
  1437. }
  1438. current++;
  1439. }
  1440. GC_TRACE("Collection finished");
  1441. GC_G(collected) += count;
  1442. GC_G(gc_protected) = 0;
  1443. GC_G(gc_active) = 0;
  1444. }
  1445. gc_compact();
  1446. return count;
  1447. }
  1448. ZEND_API void zend_gc_get_status(zend_gc_status *status)
  1449. {
  1450. status->runs = GC_G(gc_runs);
  1451. status->collected = GC_G(collected);
  1452. status->threshold = GC_G(gc_threshold);
  1453. status->num_roots = GC_G(num_roots);
  1454. }
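/* Usage sketch (illustrative, not part of this file): an extension or SAPI
 * can sample the collector counters exposed above; zend_gc_status is
 * declared in zend_gc.h.
 *
 *   zend_gc_status s;
 *   zend_gc_get_status(&s);
 *   fprintf(stderr, "gc: %u runs, %u collected, %u roots buffered\n",
 *           s.runs, s.collected, s.num_roots);
 */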
  1455. /*
  1456. * Local variables:
  1457. * tab-width: 4
  1458. * c-basic-offset: 4
  1459. * indent-tabs-mode: t
  1460. * End:
  1461. * vim600: sw=4 ts=4 fdm=marker
  1462. * vim<600: sw=4 ts=4
  1463. *
  1464. * vim:noexpandtab:
  1465. */