GitHub User Fetcher 1.0.0
C Application with Server and GUI
Loading...
Searching...
No Matches
duktape-1.8.0/src-separate/duk_heap_markandsweep.c
Go to the documentation of this file.
1/*
2 * Mark-and-sweep garbage collection.
3 */
4
5#include "duk_internal.h"
6
7#ifdef DUK_USE_MARK_AND_SWEEP
8
11
12/*
13 * Misc
14 */
15
16/* Select a thread for mark-and-sweep use.
17 *
18 * XXX: This needs to change later.
19 */
21 if (heap->curr_thread) {
22 return heap->curr_thread;
23 }
24 return heap->heap_thread; /* may be NULL, too */
25}
26
27/*
28 * Marking functions for heap types: mark children recursively
29 */
30
32 DUK_UNREF(heap);
33 DUK_UNREF(h);
34
35 DUK_DDD(DUK_DDDPRINT("duk__mark_hstring: %p", (void *) h));
36 DUK_ASSERT(h);
37
38 /* nothing to process */
39}
40
43
44 DUK_DDD(DUK_DDDPRINT("duk__mark_hobject: %p", (void *) h));
45
46 DUK_ASSERT(h);
47
48 /* XXX: use advancing pointers instead of index macros -> faster and smaller? */
49
50 for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ENEXT(h); i++) {
51 duk_hstring *key = DUK_HOBJECT_E_GET_KEY(heap, h, i);
52 if (!key) {
53 continue;
54 }
55 duk__mark_heaphdr(heap, (duk_heaphdr *) key);
56 if (DUK_HOBJECT_E_SLOT_IS_ACCESSOR(heap, h, i)) {
59 } else {
60 duk__mark_tval(heap, &DUK_HOBJECT_E_GET_VALUE_PTR(heap, h, i)->v);
61 }
62 }
63
64 for (i = 0; i < (duk_uint_fast32_t) DUK_HOBJECT_GET_ASIZE(h); i++) {
66 }
67
68 /* hash part is a 'weak reference' and does not contribute */
69
71
74 duk_tval *tv, *tv_end;
75 duk_hobject **fn, **fn_end;
76
77 /* 'data' is reachable through every compiled function which
78 * contains a reference.
79 */
80
82
83 if (DUK_HCOMPILEDFUNCTION_GET_DATA(heap, f) != NULL) {
86 while (tv < tv_end) {
87 duk__mark_tval(heap, tv);
88 tv++;
89 }
90
93 while (fn < fn_end) {
94 duk__mark_heaphdr(heap, (duk_heaphdr *) *fn);
95 fn++;
96 }
97 } else {
98 /* May happen in some out-of-memory corner cases. */
99 DUK_D(DUK_DPRINT("duk_hcompiledfunction 'data' is NULL, skipping marking"));
100 }
101 } else if (DUK_HOBJECT_IS_NATIVEFUNCTION(h)) {
103 DUK_UNREF(f);
104 /* nothing to mark */
105 } else if (DUK_HOBJECT_IS_BUFFEROBJECT(h)) {
107 duk__mark_heaphdr(heap, (duk_heaphdr *) b->buf);
108 } else if (DUK_HOBJECT_IS_THREAD(h)) {
109 duk_hthread *t = (duk_hthread *) h;
110 duk_tval *tv;
111
112 tv = t->valstack;
113 while (tv < t->valstack_top) {
114 duk__mark_tval(heap, tv);
115 tv++;
116 }
117
118 for (i = 0; i < (duk_uint_fast32_t) t->callstack_top; i++) {
119 duk_activation *act = t->callstack + i;
121 duk__mark_heaphdr(heap, (duk_heaphdr *) act->var_env);
122 duk__mark_heaphdr(heap, (duk_heaphdr *) act->lex_env);
123#ifdef DUK_USE_NONSTD_FUNC_CALLER_PROPERTY
124 duk__mark_heaphdr(heap, (duk_heaphdr *) act->prev_caller);
125#endif
126 }
127
128#if 0 /* nothing now */
129 for (i = 0; i < (duk_uint_fast32_t) t->catchstack_top; i++) {
130 duk_catcher *cat = t->catchstack + i;
131 }
132#endif
133
135
136 /* XXX: duk_small_uint_t would be enough for this loop */
137 for (i = 0; i < DUK_NUM_BUILTINS; i++) {
138 duk__mark_heaphdr(heap, (duk_heaphdr *) t->builtins[i]);
139 }
140 }
141}
142
143/* recursion tracking happens here only */
145 DUK_DDD(DUK_DDDPRINT("duk__mark_heaphdr %p, type %ld",
146 (void *) h,
147 (h != NULL ? (long) DUK_HEAPHDR_GET_TYPE(h) : (long) -1)));
148 if (!h) {
149 return;
150 }
151#if defined(DUK_USE_ROM_OBJECTS)
153 DUK_DDD(DUK_DDDPRINT("readonly object %p, skip", (void *) h));
154 return;
155 }
156#endif
158 DUK_DDD(DUK_DDDPRINT("already marked reachable, skip"));
159 return;
160 }
162
164 /* log this with a normal debug level because this should be relatively rare */
165 DUK_D(DUK_DPRINT("mark-and-sweep recursion limit reached, marking as temproot: %p", (void *) h));
168 return;
169 }
170
172
173 switch ((int) DUK_HEAPHDR_GET_TYPE(h)) {
174 case DUK_HTYPE_STRING:
175 duk__mark_hstring(heap, (duk_hstring *) h);
176 break;
177 case DUK_HTYPE_OBJECT:
178 duk__mark_hobject(heap, (duk_hobject *) h);
179 break;
180 case DUK_HTYPE_BUFFER:
181 /* nothing to mark */
182 break;
183 default:
184 DUK_D(DUK_DPRINT("attempt to mark heaphdr %p with invalid htype %ld", (void *) h, (long) DUK_HEAPHDR_GET_TYPE(h)));
186 }
187
189}
190
192 DUK_DDD(DUK_DDDPRINT("duk__mark_tval %p", (void *) tv));
193 if (!tv) {
194 return;
195 }
198 }
199}
200
201/*
202 * Mark the heap.
203 */
204
207
208 DUK_DD(DUK_DDPRINT("duk__mark_roots_heap: %p", (void *) heap));
209
212
213 for (i = 0; i < DUK_HEAP_NUM_STRINGS; i++) {
214 duk_hstring *h = DUK_HEAP_GET_STRING(heap, i);
215 duk__mark_heaphdr(heap, (duk_heaphdr *) h);
216 }
217
218 duk__mark_tval(heap, &heap->lj.value1);
219 duk__mark_tval(heap, &heap->lj.value2);
220
221#if defined(DUK_USE_DEBUGGER_SUPPORT)
222 for (i = 0; i < heap->dbg_breakpoint_count; i++) {
223 duk__mark_heaphdr(heap, (duk_heaphdr *) heap->dbg_breakpoints[i].filename);
224 }
225#endif
226}
227
228/*
229 * Mark refzero_list objects.
230 *
231 * Objects on the refzero_list have no inbound references. They might have
232 * outbound references to objects that we might free, which would invalidate
233 * any references held by the refzero objects. A refzero object might also
234 * be rescued by refcount finalization. Refzero objects are treated as
235 * reachability roots to ensure they (or anything they point to) are not
236 * freed in mark-and-sweep.
237 */
238
239#ifdef DUK_USE_REFERENCE_COUNTING
241 duk_heaphdr *hdr;
242
243 DUK_DD(DUK_DDPRINT("duk__mark_refzero_list: %p", (void *) heap));
244
245 hdr = heap->refzero_list;
246 while (hdr) {
247 duk__mark_heaphdr(heap, hdr);
248 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
249 }
250}
251#endif
252
253/*
254 * Mark unreachable, finalizable objects.
255 *
256 * Such objects will be moved aside and their finalizers run later. They have
257 * to be treated as reachability roots for their properties etc to remain
258 * allocated. This marking is only done for unreachable values which would
259 * be swept later (refzero_list is thus excluded).
260 *
261 * Objects are first marked FINALIZABLE and only then marked as reachability
262 * roots; otherwise circular references might be handled inconsistently.
263 */
264
266 duk_hthread *thr;
267 duk_heaphdr *hdr;
268 duk_size_t count_finalizable = 0;
269
270 DUK_DD(DUK_DDPRINT("duk__mark_finalizable: %p", (void *) heap));
271
272 thr = duk__get_temp_hthread(heap);
273 DUK_ASSERT(thr != NULL);
274
275 hdr = heap->heap_allocated;
276 while (hdr) {
277 /* A finalizer is looked up from the object and up its prototype chain
278 * (which allows inherited finalizers). A prototype loop must not cause
279 * an error to be thrown here; duk_hobject_hasprop_raw() will ignore a
280 * prototype loop silently and indicate that the property doesn't exist.
281 */
282
283 if (!DUK_HEAPHDR_HAS_REACHABLE(hdr) &&
287
288 /* heaphdr:
289 * - is not reachable
290 * - is an object
291 * - is not a finalized object
292 * - has a finalizer
293 */
294
295 DUK_DD(DUK_DDPRINT("unreachable heap object will be "
296 "finalized -> mark as finalizable "
297 "and treat as a reachability root: %p",
298 (void *) hdr));
301 count_finalizable ++;
302 }
303
304 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
305 }
306
307 if (count_finalizable == 0) {
308 return;
309 }
310
311 DUK_DD(DUK_DDPRINT("marked %ld heap objects as finalizable, now mark them reachable",
312 (long) count_finalizable));
313
314 hdr = heap->heap_allocated;
315 while (hdr) {
317 duk__mark_heaphdr(heap, hdr);
318 }
319
320 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
321 }
322
323 /* Caller will finish the marking process if we hit a recursion limit. */
324}
325
326/*
327 * Mark objects on finalize_list.
328 *
329 */
330
332 duk_heaphdr *hdr;
333#ifdef DUK_USE_DEBUG
334 duk_size_t count_finalize_list = 0;
335#endif
336
337 DUK_DD(DUK_DDPRINT("duk__mark_finalize_list: %p", (void *) heap));
338
339 hdr = heap->finalize_list;
340 while (hdr) {
341 duk__mark_heaphdr(heap, hdr);
342 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
343#ifdef DUK_USE_DEBUG
344 count_finalize_list++;
345#endif
346 }
347
348#ifdef DUK_USE_DEBUG
349 if (count_finalize_list > 0) {
350 DUK_D(DUK_DPRINT("marked %ld objects on the finalize_list as reachable (previous finalizer run skipped)",
351 (long) count_finalize_list));
352 }
353#endif
354}
355
356/*
357 * Fallback marking handler if recursion limit is reached.
358 *
359 * Iterates 'temproots' until recursion limit is no longer hit. Note
360 * that temproots may reside either in heap allocated list or the
361 * refzero work list. This is a slow scan, but guarantees that we
362 * finish with a bounded C stack.
363 *
364 * Note that nodes may have been marked as temproots before this
365 * scan begun, OR they may have been marked during the scan (as
366 * we process nodes recursively also during the scan). This is
367 * intended behavior.
368 */
369
370#ifdef DUK_USE_DEBUG
372#else
374#endif
375 if (!DUK_HEAPHDR_HAS_TEMPROOT(hdr)) {
376 DUK_DDD(DUK_DDDPRINT("not a temp root: %p", (void *) hdr));
377 return;
378 }
379
380 DUK_DDD(DUK_DDDPRINT("found a temp root: %p", (void *) hdr));
382 DUK_HEAPHDR_CLEAR_REACHABLE(hdr); /* done so that duk__mark_heaphdr() works correctly */
383 duk__mark_heaphdr(heap, hdr);
384
385#ifdef DUK_USE_DEBUG
386 (*count)++;
387#endif
388}
389
391 duk_heaphdr *hdr;
392#ifdef DUK_USE_DEBUG
393 duk_size_t count;
394#endif
395
396 DUK_DD(DUK_DDPRINT("duk__mark_temproots_by_heap_scan: %p", (void *) heap));
397
399 DUK_DD(DUK_DDPRINT("recursion limit reached, doing heap scan to continue from temproots"));
400
401#ifdef DUK_USE_DEBUG
402 count = 0;
403#endif
405
406 hdr = heap->heap_allocated;
407 while (hdr) {
408#ifdef DUK_USE_DEBUG
409 duk__handle_temproot(heap, hdr, &count);
410#else
411 duk__handle_temproot(heap, hdr);
412#endif
413 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
414 }
415
416 /* must also check refzero_list */
417#ifdef DUK_USE_REFERENCE_COUNTING
418 hdr = heap->refzero_list;
419 while (hdr) {
420#ifdef DUK_USE_DEBUG
421 duk__handle_temproot(heap, hdr, &count);
422#else
423 duk__handle_temproot(heap, hdr);
424#endif
425 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
426 }
427#endif /* DUK_USE_REFERENCE_COUNTING */
428
429#ifdef DUK_USE_DEBUG
430 DUK_DD(DUK_DDPRINT("temproot mark heap scan processed %ld temp roots", (long) count));
431#endif
432 }
433}
434
435/*
436 * Finalize refcounts for heap elements just about to be freed.
437 * This must be done for all objects before freeing to avoid any
438 * stale pointer dereferences.
439 *
440 * Note that this must deduce the set of objects to be freed
441 * identically to duk__sweep_heap().
442 */
443
444#ifdef DUK_USE_REFERENCE_COUNTING
446 duk_hthread *thr;
447 duk_heaphdr *hdr;
448
449 thr = duk__get_temp_hthread(heap);
450 DUK_ASSERT(thr != NULL);
451
452 DUK_DD(DUK_DDPRINT("duk__finalize_refcounts: heap=%p, hthread=%p",
453 (void *) heap, (void *) thr));
454
455 hdr = heap->heap_allocated;
456 while (hdr) {
457 if (!DUK_HEAPHDR_HAS_REACHABLE(hdr)) {
458 /*
459 * Unreachable object about to be swept. Finalize target refcounts
460 * (objects which the unreachable object points to) without doing
461 * refzero processing. Recursive decrefs are also prevented when
462 * refzero processing is disabled.
463 *
464 * Value cannot be a finalizable object, as they have been made
465 * temporarily reachable for this round.
466 */
467
468 DUK_DDD(DUK_DDDPRINT("unreachable object, refcount finalize before sweeping: %p", (void *) hdr));
470 }
471
472 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
473 }
474}
475#endif /* DUK_USE_REFERENCE_COUNTING */
476
477/*
478 * Clear (reachable) flags of refzero work list.
479 */
480
481#ifdef DUK_USE_REFERENCE_COUNTING
483 duk_heaphdr *hdr;
484
485 DUK_DD(DUK_DDPRINT("duk__clear_refzero_list_flags: %p", (void *) heap));
486
487 hdr = heap->refzero_list;
488 while (hdr) {
491 /* DUK_HEAPHDR_HAS_FINALIZED may or may not be set. */
493 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
494 }
495}
496#endif /* DUK_USE_REFERENCE_COUNTING */
497
498/*
499 * Clear (reachable) flags of finalize_list
500 *
501 * We could mostly do in the sweep phase when we move objects from the
502 * heap into the finalize_list. However, if a finalizer run is skipped
503 * during a mark-and-sweep, the objects on the finalize_list will be marked
504 * reachable during the next mark-and-sweep. Since they're already on the
505 * finalize_list, no-one will be clearing their REACHABLE flag so we do it
506 * here. (This now overlaps with the sweep handling in a harmless way.)
507 */
508
510 duk_heaphdr *hdr;
511
512 DUK_DD(DUK_DDPRINT("duk__clear_finalize_list_flags: %p", (void *) heap));
513
514 hdr = heap->finalize_list;
515 while (hdr) {
520 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
521 }
522}
523
524/*
525 * Sweep stringtable
526 */
527
528#if defined(DUK_USE_STRTAB_CHAIN)
529
530/* XXX: skip count_free w/o debug? */
531#if defined(DUK_USE_HEAPPTR16)
532DUK_LOCAL void duk__sweep_string_chain16(duk_heap *heap, duk_uint16_t *slot, duk_size_t *count_keep, duk_size_t *count_free) {
533 duk_uint16_t h16 = *slot;
534 duk_hstring *h;
535 duk_uint16_t null16 = heap->heapptr_null16;
536
537 if (h16 == null16) {
538 /* nop */
539 return;
540 }
541 h = (duk_hstring *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, h16);
542 DUK_ASSERT(h != NULL);
543
546 (*count_keep)++;
547 } else {
548#if defined(DUK_USE_REFERENCE_COUNTING)
550#endif
551 /* deal with weak references first */
553 *slot = null16;
554
555 /* free inner references (these exist e.g. when external
556 * strings are enabled)
557 */
558 duk_free_hstring_inner(heap, h);
559 DUK_FREE(heap, h);
560 (*count_free)++;
561 }
562}
563#else /* DUK_USE_HEAPPTR16 */
564DUK_LOCAL void duk__sweep_string_chain(duk_heap *heap, duk_hstring **slot, duk_size_t *count_keep, duk_size_t *count_free) {
565 duk_hstring *h = *slot;
566
567 if (h == NULL) {
568 /* nop */
569 return;
570 }
571
574 (*count_keep)++;
575 } else {
576#if defined(DUK_USE_REFERENCE_COUNTING)
578#endif
579 /* deal with weak references first */
581 *slot = NULL;
582
583 /* free inner references (these exist e.g. when external
584 * strings are enabled)
585 */
586 duk_free_hstring_inner(heap, h);
587 DUK_FREE(heap, h);
588 (*count_free)++;
589 }
590}
591#endif /* DUK_USE_HEAPPTR16 */
592
593DUK_LOCAL void duk__sweep_stringtable_chain(duk_heap *heap, duk_size_t *out_count_keep) {
596 duk_size_t count_free = 0;
597 duk_size_t count_keep = 0;
598 duk_size_t j, n;
599#if defined(DUK_USE_HEAPPTR16)
600 duk_uint16_t *lst;
601#else
602 duk_hstring **lst;
603#endif
604
605 DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));
606
607 /* Non-zero refcounts should not happen for unreachable strings,
608 * because we refcount finalize all unreachable objects which
609 * should have decreased unreachable string refcounts to zero
610 * (even for cycles).
611 */
612
613 for (i = 0; i < DUK_STRTAB_CHAIN_SIZE; i++) {
614 e = heap->strtable + i;
615 if (e->listlen == 0) {
616#if defined(DUK_USE_HEAPPTR16)
617 duk__sweep_string_chain16(heap, &e->u.str16, &count_keep, &count_free);
618#else
619 duk__sweep_string_chain(heap, &e->u.str, &count_keep, &count_free);
620#endif
621 } else {
622#if defined(DUK_USE_HEAPPTR16)
623 lst = (duk_uint16_t *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, e->u.strlist16);
624#else
625 lst = e->u.strlist;
626#endif
627 for (j = 0, n = e->listlen; j < n; j++) {
628#if defined(DUK_USE_HEAPPTR16)
629 duk__sweep_string_chain16(heap, lst + j, &count_keep, &count_free);
630#else
631 duk__sweep_string_chain(heap, lst + j, &count_keep, &count_free);
632#endif
633 }
634 }
635 }
636
637 DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
638 (long) count_free, (long) count_keep));
639 *out_count_keep = count_keep;
640}
641#endif /* DUK_USE_STRTAB_CHAIN */
642
643#if defined(DUK_USE_STRTAB_PROBE)
645 duk_hstring *h;
647#ifdef DUK_USE_DEBUG
648 duk_size_t count_free = 0;
649#endif
650 duk_size_t count_keep = 0;
651
652 DUK_DD(DUK_DDPRINT("duk__sweep_stringtable: %p", (void *) heap));
653
654 for (i = 0; i < heap->st_size; i++) {
655#if defined(DUK_USE_HEAPPTR16)
656 h = (duk_hstring *) DUK_USE_HEAPPTR_DEC16(heap->heap_udata, heap->strtable16[i]);
657#else
658 h = heap->strtable[i];
659#endif
660 if (h == NULL || h == DUK_STRTAB_DELETED_MARKER(heap)) {
661 continue;
662 } else if (DUK_HEAPHDR_HAS_REACHABLE((duk_heaphdr *) h)) {
664 count_keep++;
665 continue;
666 }
667
668#ifdef DUK_USE_DEBUG
669 count_free++;
670#endif
671
672#if defined(DUK_USE_REFERENCE_COUNTING)
673 /* Non-zero refcounts should not happen for unreachable strings,
674 * because we refcount finalize all unreachable objects which
675 * should have decreased unreachable string refcounts to zero
676 * (even for cycles).
677 */
679#endif
680
681 DUK_DDD(DUK_DDDPRINT("sweep string, not reachable: %p", (void *) h));
682
683 /* deal with weak references first */
685
686 /* remove the string (mark DELETED), could also call
687 * duk_heap_string_remove() but that would be slow and
688 * pointless because we already know the slot.
689 */
690#if defined(DUK_USE_HEAPPTR16)
691 heap->strtable16[i] = heap->heapptr_deleted16;
692#else
693 heap->strtable[i] = DUK_STRTAB_DELETED_MARKER(heap);
694#endif
695
696 /* free inner references (these exist e.g. when external
697 * strings are enabled)
698 */
700
701 /* finally free the struct itself */
702 DUK_FREE(heap, h);
703 }
704
705#ifdef DUK_USE_DEBUG
706 DUK_D(DUK_DPRINT("mark-and-sweep sweep stringtable: %ld freed, %ld kept",
707 (long) count_free, (long) count_keep));
708#endif
709 *out_count_keep = count_keep;
710}
711#endif /* DUK_USE_STRTAB_PROBE */
712
713/*
714 * Sweep heap
715 */
716
717DUK_LOCAL void duk__sweep_heap(duk_heap *heap, duk_int_t flags, duk_size_t *out_count_keep) {
718 duk_heaphdr *prev; /* last element that was left in the heap */
719 duk_heaphdr *curr;
721#ifdef DUK_USE_DEBUG
722 duk_size_t count_free = 0;
723 duk_size_t count_finalize = 0;
724 duk_size_t count_rescue = 0;
725#endif
726 duk_size_t count_keep = 0;
727
728 DUK_UNREF(flags);
729 DUK_DD(DUK_DDPRINT("duk__sweep_heap: %p", (void *) heap));
730
731 prev = NULL;
732 curr = heap->heap_allocated;
733 heap->heap_allocated = NULL;
734 while (curr) {
735 /* Strings and ROM objects are never placed on the heap allocated list. */
738
739 next = DUK_HEAPHDR_GET_NEXT(heap, curr);
740
741 if (DUK_HEAPHDR_HAS_REACHABLE(curr)) {
742 /*
743 * Reachable object, keep
744 */
745
746 DUK_DDD(DUK_DDDPRINT("sweep, reachable: %p", (void *) curr));
747
748 if (DUK_HEAPHDR_HAS_FINALIZABLE(curr)) {
749 /*
750 * If object has been marked finalizable, move it to the
751 * "to be finalized" work list. It will be collected on
752 * the next mark-and-sweep if it is still unreachable
753 * after running the finalizer.
754 */
755
758 DUK_DDD(DUK_DDDPRINT("object has finalizer, move to finalization work list: %p", (void *) curr));
759
760#ifdef DUK_USE_DOUBLE_LINKED_HEAP
761 if (heap->finalize_list) {
762 DUK_HEAPHDR_SET_PREV(heap, heap->finalize_list, curr);
763 }
764 DUK_HEAPHDR_SET_PREV(heap, curr, NULL);
765#endif
766 DUK_HEAPHDR_SET_NEXT(heap, curr, heap->finalize_list);
767 DUK_ASSERT_HEAPHDR_LINKS(heap, curr);
768 heap->finalize_list = curr;
769#ifdef DUK_USE_DEBUG
770 count_finalize++;
771#endif
772 } else {
773 /*
774 * Object will be kept; queue object back to heap_allocated (to tail)
775 */
776
777 if (DUK_HEAPHDR_HAS_FINALIZED(curr)) {
778 /*
779 * Object's finalizer was executed on last round, and
780 * object has been happily rescued.
781 */
782
785 DUK_DD(DUK_DDPRINT("object rescued during mark-and-sweep finalization: %p", (void *) curr));
786#ifdef DUK_USE_DEBUG
787 count_rescue++;
788#endif
789 } else {
790 /*
791 * Plain, boring reachable object.
792 */
793 DUK_DD(DUK_DDPRINT("keep object: %!iO", curr));
794 count_keep++;
795 }
796
797 if (!heap->heap_allocated) {
798 heap->heap_allocated = curr;
799 }
800 if (prev) {
801 DUK_HEAPHDR_SET_NEXT(heap, prev, curr);
802 }
803#ifdef DUK_USE_DOUBLE_LINKED_HEAP
804 DUK_HEAPHDR_SET_PREV(heap, curr, prev);
805#endif
806 DUK_ASSERT_HEAPHDR_LINKS(heap, prev);
807 DUK_ASSERT_HEAPHDR_LINKS(heap, curr);
808 prev = curr;
809 }
810
814
818
819 curr = next;
820 } else {
821 /*
822 * Unreachable object, free
823 */
824
825 DUK_DDD(DUK_DDDPRINT("sweep, not reachable: %p", (void *) curr));
826
827#if defined(DUK_USE_REFERENCE_COUNTING)
828 /* Non-zero refcounts should not happen because we refcount
829 * finalize all unreachable objects which should cancel out
830 * refcounts (even for cycles).
831 */
833#endif
835
836 if (DUK_HEAPHDR_HAS_FINALIZED(curr)) {
837 DUK_DDD(DUK_DDDPRINT("finalized object not rescued: %p", (void *) curr));
838 }
839
840 /* Note: object cannot be a finalizable unreachable object, as
841 * they have been marked temporarily reachable for this round,
842 * and are handled above.
843 */
844
845#ifdef DUK_USE_DEBUG
846 count_free++;
847#endif
848
849 /* weak refs should be handled here, but no weak refs for
850 * any non-string objects exist right now.
851 */
852
853 /* free object and all auxiliary (non-heap) allocs */
854 duk_heap_free_heaphdr_raw(heap, curr);
855
856 curr = next;
857 }
858 }
859 if (prev) {
860 DUK_HEAPHDR_SET_NEXT(heap, prev, NULL);
861 }
862 DUK_ASSERT_HEAPHDR_LINKS(heap, prev);
863
864#ifdef DUK_USE_DEBUG
865 DUK_D(DUK_DPRINT("mark-and-sweep sweep objects (non-string): %ld freed, %ld kept, %ld rescued, %ld queued for finalization",
866 (long) count_free, (long) count_keep, (long) count_rescue, (long) count_finalize));
867#endif
868 *out_count_keep = count_keep;
869}
870
871/*
872 * Run (object) finalizers in the "to be finalized" work list.
873 */
874
876 duk_heaphdr *curr;
878#ifdef DUK_USE_DEBUG
879 duk_size_t count = 0;
880#endif
881 duk_hthread *thr;
882
883 DUK_DD(DUK_DDPRINT("duk__run_object_finalizers: %p", (void *) heap));
884
885 thr = duk__get_temp_hthread(heap);
886 DUK_ASSERT(thr != NULL);
887
888 curr = heap->finalize_list;
889 while (curr) {
890 DUK_DDD(DUK_DDDPRINT("mark-and-sweep finalize: %p", (void *) curr));
891
892 DUK_ASSERT(DUK_HEAPHDR_GET_TYPE(curr) == DUK_HTYPE_OBJECT); /* only objects have finalizers */
893 DUK_ASSERT(!DUK_HEAPHDR_HAS_REACHABLE(curr)); /* flags have been already cleared */
897 DUK_ASSERT(!DUK_HEAPHDR_HAS_READONLY(curr)); /* No finalizers for ROM objects */
898
899 /* Keep heap->finalize_list up-to-date during the list walk.
900 * This has no functional impact, but does matter e.g. for
901 * duk_push_heapptr() asserts when assertions are enabled.
902 */
903 heap->finalize_list = curr;
904
905 if (DUK_LIKELY((flags & DUK_MS_FLAG_SKIP_FINALIZERS) == 0)) {
906 /* Run the finalizer, duk_hobject_run_finalizer() sets FINALIZED.
907 * Next mark-and-sweep will collect the object unless it has
908 * become reachable (i.e. rescued). FINALIZED prevents the
909 * finalizer from being executed again before that.
910 */
911 duk_hobject_run_finalizer(thr, (duk_hobject *) curr); /* must never longjmp */
913
914 /* XXX: could clear FINALIZED already here; now cleared in
915 * next mark-and-sweep.
916 */
917 } else {
918 /* Used during heap destruction: don't actually run finalizers
919 * because we're heading into forced finalization. Instead,
920 * queue finalizable objects back to the heap_allocated list.
921 */
922 DUK_D(DUK_DPRINT("skip finalizers flag set, queue object to heap_allocated without finalizing"));
924 }
925
926 /* queue back to heap_allocated */
927 next = DUK_HEAPHDR_GET_NEXT(heap, curr);
929
930 curr = next;
931#ifdef DUK_USE_DEBUG
932 count++;
933#endif
934 }
935
936 /* finalize_list will always be processed completely */
937 heap->finalize_list = NULL;
938
939#ifdef DUK_USE_DEBUG
940 DUK_D(DUK_DPRINT("mark-and-sweep finalize objects: %ld finalizers called", (long) count));
941#endif
942}
943
944/*
945 * Object compaction.
946 *
947 * Compaction is assumed to never throw an error.
948 */
949
951 /* XXX: for threads, compact value stack, call stack, catch stack? */
952
953 duk_hobject *obj = duk_get_hobject(ctx, -1);
954 DUK_ASSERT(obj != NULL);
956 return 0;
957}
958
959#ifdef DUK_USE_DEBUG
960DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start, duk_size_t *p_count_check, duk_size_t *p_count_compact, duk_size_t *p_count_bytes_saved) {
961#else
963#endif
964 duk_heaphdr *curr;
965#ifdef DUK_USE_DEBUG
966 duk_size_t old_size, new_size;
967#endif
968 duk_hobject *obj;
969
970 DUK_UNREF(heap);
971
972 curr = start;
973 while (curr) {
974 DUK_DDD(DUK_DDDPRINT("mark-and-sweep compact: %p", (void *) curr));
975
977 goto next;
978 }
979 obj = (duk_hobject *) curr;
980
981#ifdef DUK_USE_DEBUG
985#endif
986
987 DUK_DD(DUK_DDPRINT("compact object: %p", (void *) obj));
988 duk_push_hobject((duk_context *) thr, obj);
989 /* XXX: disable error handlers for duration of compaction? */
991
992#ifdef DUK_USE_DEBUG
996#endif
997
998#ifdef DUK_USE_DEBUG
999 (*p_count_compact)++;
1000 (*p_count_bytes_saved) += (duk_size_t) (old_size - new_size);
1001#endif
1002
1003 next:
1004 curr = DUK_HEAPHDR_GET_NEXT(heap, curr);
1005#ifdef DUK_USE_DEBUG
1006 (*p_count_check)++;
1007#endif
1008 }
1009}
1010
1012 /* XXX: which lists should participate? to be finalized? */
1013#ifdef DUK_USE_DEBUG
1014 duk_size_t count_check = 0;
1015 duk_size_t count_compact = 0;
1016 duk_size_t count_bytes_saved = 0;
1017#endif
1018 duk_hthread *thr;
1019
1020 DUK_DD(DUK_DDPRINT("duk__compact_objects: %p", (void *) heap));
1021
1022 thr = duk__get_temp_hthread(heap);
1023 DUK_ASSERT(thr != NULL);
1024
1025#ifdef DUK_USE_DEBUG
1026 duk__compact_object_list(heap, thr, heap->heap_allocated, &count_check, &count_compact, &count_bytes_saved);
1027 duk__compact_object_list(heap, thr, heap->finalize_list, &count_check, &count_compact, &count_bytes_saved);
1028#ifdef DUK_USE_REFERENCE_COUNTING
1029 duk__compact_object_list(heap, thr, heap->refzero_list, &count_check, &count_compact, &count_bytes_saved);
1030#endif
1031#else
1032 duk__compact_object_list(heap, thr, heap->heap_allocated);
1033 duk__compact_object_list(heap, thr, heap->finalize_list);
1034#ifdef DUK_USE_REFERENCE_COUNTING
1035 duk__compact_object_list(heap, thr, heap->refzero_list);
1036#endif
1037#endif
1038
1039#ifdef DUK_USE_DEBUG
1040 DUK_D(DUK_DPRINT("mark-and-sweep compact objects: %ld checked, %ld compaction attempts, %ld bytes saved by compaction",
1041 (long) count_check, (long) count_compact, (long) count_bytes_saved));
1042#endif
1043}
1044
1045/*
1046 * Assertion helpers.
1047 */
1048
1049#ifdef DUK_USE_ASSERTIONS
1050DUK_LOCAL void duk__assert_heaphdr_flags(duk_heap *heap) {
1051 duk_heaphdr *hdr;
1052
1053 hdr = heap->heap_allocated;
1054 while (hdr) {
1058 /* may have FINALIZED */
1059 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
1060 }
1061
1062#ifdef DUK_USE_REFERENCE_COUNTING
1063 hdr = heap->refzero_list;
1064 while (hdr) {
1068 /* DUK_HEAPHDR_HAS_FINALIZED may be set if we're doing a
1069 * refzero finalization and mark-and-sweep gets triggered
1070 * during the finalizer.
1071 */
1072 /* DUK_HEAPHDR_HAS_FINALIZED may or may not be set. */
1073 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
1074 }
1075#endif /* DUK_USE_REFERENCE_COUNTING */
1076}
1077
1078#ifdef DUK_USE_REFERENCE_COUNTING
1079DUK_LOCAL void duk__assert_valid_refcounts(duk_heap *heap) {
1080 duk_heaphdr *hdr = heap->heap_allocated;
1081 while (hdr) {
1082 if (DUK_HEAPHDR_GET_REFCOUNT(hdr) == 0 &&
1084 /* An object may be in heap_allocated list with a zero
1085 * refcount if it has just been finalized and is waiting
1086 * to be collected by the next cycle.
1087 */
1088 } else if (DUK_HEAPHDR_GET_REFCOUNT(hdr) == 0) {
1089 /* An object may be in heap_allocated list with a zero
1090 * refcount also if it is a temporary object created by
1091 * a finalizer; because finalization now runs inside
1092 * mark-and-sweep, such objects will not be queued to
1093 * refzero_list and will thus appear here with refcount
1094 * zero.
1095 */
1096#if 0 /* this case can no longer occur because refcount is unsigned */
1097 } else if (DUK_HEAPHDR_GET_REFCOUNT(hdr) < 0) {
1098 DUK_D(DUK_DPRINT("invalid refcount: %ld, %p -> %!O",
1099 (hdr != NULL ? (long) DUK_HEAPHDR_GET_REFCOUNT(hdr) : (long) 0),
1100 (void *) hdr, (duk_heaphdr *) hdr));
1102#endif
1103 }
1104 hdr = DUK_HEAPHDR_GET_NEXT(heap, hdr);
1105 }
1106}
1107#endif /* DUK_USE_REFERENCE_COUNTING */
1108#endif /* DUK_USE_ASSERTIONS */
1109
1110/*
1111 * Finalizer torture. Do one fake finalizer call which causes side effects
1112 * similar to one or more finalizers on actual objects.
1113 */
1114
1115#if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
1116DUK_LOCAL duk_ret_t duk__markandsweep_fake_finalizer(duk_context *ctx) {
1117 DUK_D(DUK_DPRINT("fake mark-and-sweep torture finalizer executed"));
1118
1119 /* Require a lot of stack to force a value stack grow/shrink.
1120 * Recursive mark-and-sweep is prevented by allocation macros
1121 * so this won't trigger another mark-and-sweep.
1122 */
1123 duk_require_stack(ctx, 100000);
1124
1125 /* XXX: do something to force a callstack grow/shrink, perhaps
1126 * just a manual forced resize or a forced relocating realloc?
1127 */
1128
1129 return 0;
1130}
1131
1132DUK_LOCAL void duk__markandsweep_torture_finalizer(duk_hthread *thr) {
1133 duk_context *ctx;
1134 duk_int_t rc;
1135
1136 DUK_ASSERT(thr != NULL);
1137 ctx = (duk_context *) thr;
1138
1139 /* Avoid fake finalization when callstack limit has been reached.
1140 * Otherwise a callstack limit error will be created, then refzero'ed.
1141 */
1143 thr->callstack_size + 2 * DUK_CALLSTACK_GROW_STEP >= thr->callstack_max /*approximate*/) {
1144 DUK_D(DUK_DPRINT("call recursion depth reached, avoid fake mark-and-sweep torture finalizer"));
1145 return;
1146 }
1147
1148 /* Run fake finalizer. Avoid creating unnecessary garbage. */
1149 duk_push_c_function(ctx, duk__markandsweep_fake_finalizer, 0 /*nargs*/);
1150 rc = duk_pcall(ctx, 0 /*nargs*/);
1151 DUK_UNREF(rc); /* ignored */
1152 duk_pop(ctx);
1153}
1154#endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */
1155
1156/*
1157 * Main mark-and-sweep function.
1158 *
1159 * 'flags' represents the features requested by the caller. The current
1160 * heap->mark_and_sweep_base_flags is ORed automatically into the flags;
1161 * the base flags mask typically prevents certain mark-and-sweep operations
1162 * to avoid trouble.
1163 */
1164
1166 duk_hthread *thr;
1167 duk_size_t count_keep_obj;
1168 duk_size_t count_keep_str;
1169#ifdef DUK_USE_VOLUNTARY_GC
1170 duk_size_t tmp;
1171#endif
1172
1174 DUK_D(DUK_DPRINT("refuse to do a recursive mark-and-sweep"));
1175 return 0;
1176 }
1177
1178 /* XXX: thread selection for mark-and-sweep is currently a hack.
1179 * If we don't have a thread, the entire mark-and-sweep is now
1180 * skipped (although we could just skip finalizations).
1181 */
1182
1183 /* If thr != NULL, the thr may still be in the middle of
1184 * initialization.
1185 * XXX: Improve the thread viability test.
1186 */
1187 thr = duk__get_temp_hthread(heap);
1188 if (thr == NULL) {
1189 DUK_D(DUK_DPRINT("gc skipped because we don't have a temp thread"));
1190
1191 /* reset voluntary gc trigger count */
1192#ifdef DUK_USE_VOLUNTARY_GC
1194#endif
1195 return 0; /* OK */
1196 }
1197
1198 /* If debugger is paused, garbage collection is disabled by default. */
1199 /* XXX: will need a force flag if garbage collection is triggered
1200 * explicitly during paused state.
1201 */
1202#if defined(DUK_USE_DEBUGGER_SUPPORT)
1203 if (DUK_HEAP_IS_PAUSED(heap)) {
1204 /* Checking this here rather that in memory alloc primitives
1205 * reduces checking code there but means a failed allocation
1206 * will go through a few retries before giving up. That's
1207 * fine because this only happens during debugging.
1208 */
1209 DUK_D(DUK_DPRINT("gc skipped because debugger is paused"));
1210 return 0;
1211 }
1212#endif
1213
1214 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) starting, requested flags: 0x%08lx, effective flags: 0x%08lx",
1215 (unsigned long) flags, (unsigned long) (flags | heap->mark_and_sweep_base_flags)));
1216
1217 flags |= heap->mark_and_sweep_base_flags;
1218
1219 /*
1220 * Assertions before
1221 */
1222
1223#ifdef DUK_USE_ASSERTIONS
1227 duk__assert_heaphdr_flags(heap);
1228#ifdef DUK_USE_REFERENCE_COUNTING
1229 /* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
1230 * finalizer may trigger a mark-and-sweep.
1231 */
1232 duk__assert_valid_refcounts(heap);
1233#endif /* DUK_USE_REFERENCE_COUNTING */
1234#endif /* DUK_USE_ASSERTIONS */
1235
1236 /*
1237 * Begin
1238 */
1239
1241
1242 /*
1243 * Mark roots, hoping that recursion limit is not normally hit.
1244 * If recursion limit is hit, run additional reachability rounds
1245 * starting from "temproots" until marking is complete.
1246 *
1247 * Marking happens in two phases: first we mark actual reachability
1248 * roots (and run "temproots" to complete the process). Then we
1249 * check which objects are unreachable and are finalizable; such
 1250 * objects are marked as FINALIZABLE and marked as reachability
 1251 * roots (and "temproots" is run again to complete the process).
1252 *
1253 * The heap finalize_list must also be marked as a reachability root.
1254 * There may be objects on the list from a previous round if the
1255 * previous run had finalizer skip flag.
1256 */
1257
1258 duk__mark_roots_heap(heap); /* main reachability roots */
1259#ifdef DUK_USE_REFERENCE_COUNTING
1260 duk__mark_refzero_list(heap); /* refzero_list treated as reachability roots */
1261#endif
1262 duk__mark_temproots_by_heap_scan(heap); /* temproots */
1263
1264 duk__mark_finalizable(heap); /* mark finalizable as reachability roots */
1265 duk__mark_finalize_list(heap); /* mark finalizer work list as reachability roots */
1266 duk__mark_temproots_by_heap_scan(heap); /* temproots */
1267
1268 /*
1269 * Sweep garbage and remove marking flags, and move objects with
1270 * finalizers to the finalizer work list.
1271 *
1272 * Objects to be swept need to get their refcounts finalized before
1273 * they are swept. In other words, their target object refcounts
1274 * need to be decreased. This has to be done before freeing any
1275 * objects to avoid decref'ing dangling pointers (which may happen
1276 * even without bugs, e.g. with reference loops)
1277 *
1278 * Because strings don't point to other heap objects, similar
1279 * finalization is not necessary for strings.
1280 */
1281
1282 /* XXX: more emergency behavior, e.g. find smaller hash sizes etc */
1283
1284#ifdef DUK_USE_REFERENCE_COUNTING
1286#endif
1287 duk__sweep_heap(heap, flags, &count_keep_obj);
1288#if defined(DUK_USE_STRTAB_CHAIN)
1289 duk__sweep_stringtable_chain(heap, &count_keep_str);
1290#elif defined(DUK_USE_STRTAB_PROBE)
1291 duk__sweep_stringtable_probe(heap, &count_keep_str);
1292#else
1293#error internal error, invalid strtab options
1294#endif
1295#ifdef DUK_USE_REFERENCE_COUNTING
1297#endif
1299
1300 /*
1301 * Object compaction (emergency only).
1302 *
1303 * Object compaction is a separate step after sweeping, as there is
1304 * more free memory for it to work with. Also, currently compaction
1305 * may insert new objects into the heap allocated list and the string
1306 * table which we don't want to do during a sweep (the reachability
1307 * flags of such objects would be incorrect). The objects inserted
1308 * are currently:
1309 *
1310 * - a temporary duk_hbuffer for a new properties allocation
1311 * - if array part is abandoned, string keys are interned
1312 *
1313 * The object insertions go to the front of the list, so they do not
1314 * cause an infinite loop (they are not compacted).
1315 */
1316
1317 if ((flags & DUK_MS_FLAG_EMERGENCY) &&
1320 }
1321
1322 /*
1323 * String table resize check.
1324 *
1325 * Note: this may silently (and safely) fail if GC is caused by an
1326 * allocation call in stringtable resize_hash(). Resize_hash()
1327 * will prevent a recursive call to itself by setting the
1328 * DUK_MS_FLAG_NO_STRINGTABLE_RESIZE in heap->mark_and_sweep_base_flags.
1329 */
1330
1331 /* XXX: stringtable emergency compaction? */
1332
1333 /* XXX: remove this feature entirely? it would only matter for
1334 * emergency GC. Disable for lowest memory builds.
1335 */
1336#if defined(DUK_USE_MS_STRINGTABLE_RESIZE)
1337 if (!(flags & DUK_MS_FLAG_NO_STRINGTABLE_RESIZE)) {
1338 DUK_DD(DUK_DDPRINT("resize stringtable: %p", (void *) heap));
1340 } else {
1341 DUK_D(DUK_DPRINT("stringtable resize skipped because DUK_MS_FLAG_NO_STRINGTABLE_RESIZE is set"));
1342 }
1343#endif
1344
1345 /*
1346 * Finalize objects in the finalization work list. Finalized
1347 * objects are queued back to heap_allocated with FINALIZED set.
1348 *
1349 * Since finalizers may cause arbitrary side effects, they are
1350 * prevented during string table and object property allocation
1351 * resizing using the DUK_MS_FLAG_NO_FINALIZERS flag in
1352 * heap->mark_and_sweep_base_flags. In this case the objects
1353 * remain in the finalization work list after mark-and-sweep
1354 * exits and they may be finalized on the next pass.
1355 *
1356 * Finalization currently happens inside "MARKANDSWEEP_RUNNING"
1357 * protection (no mark-and-sweep may be triggered by the
1358 * finalizers). As a side effect:
1359 *
1360 * 1) an out-of-memory error inside a finalizer will not
1361 * cause a mark-and-sweep and may cause the finalizer
1362 * to fail unnecessarily
1363 *
1364 * 2) any temporary objects whose refcount decreases to zero
1365 * during finalization will not be put into refzero_list;
1366 * they can only be collected by another mark-and-sweep
1367 *
1368 * This is not optimal, but since the sweep for this phase has
1369 * already happened, this is probably good enough for now.
1370 */
1371
1372#if defined(DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE)
1373 /* Cannot simulate individual finalizers because finalize_list only
1374 * contains objects with actual finalizers. But simulate side effects
1375 * from finalization by doing a bogus function call and resizing the
1376 * stacks.
1377 */
1378 if (flags & DUK_MS_FLAG_NO_FINALIZERS) {
1379 DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, DUK_MS_FLAG_NO_FINALIZERS is set"));
1380 } else if (!(thr->valstack != NULL && thr->callstack != NULL && thr->catchstack != NULL)) {
1381 DUK_D(DUK_DPRINT("skip mark-and-sweep torture finalizer, thread not yet viable"));
1382 } else {
1383 DUK_D(DUK_DPRINT("run mark-and-sweep torture finalizer"));
1384 duk__markandsweep_torture_finalizer(thr);
1385 }
1386#endif /* DUK_USE_MARKANDSWEEP_FINALIZER_TORTURE */
1387
1388 if (flags & DUK_MS_FLAG_NO_FINALIZERS) {
1389 DUK_D(DUK_DPRINT("finalizer run skipped because DUK_MS_FLAG_NO_FINALIZERS is set"));
1390 } else {
1391 duk__run_object_finalizers(heap, flags);
1392 }
1393
1394 /*
1395 * Finish
1396 */
1397
1399
1400 /*
1401 * Assertions after
1402 */
1403
1404#ifdef DUK_USE_ASSERTIONS
1408 duk__assert_heaphdr_flags(heap);
1409#ifdef DUK_USE_REFERENCE_COUNTING
1410 /* Note: DUK_HEAP_HAS_REFZERO_FREE_RUNNING(heap) may be true; a refcount
1411 * finalizer may trigger a mark-and-sweep.
1412 */
1413 duk__assert_valid_refcounts(heap);
1414#endif /* DUK_USE_REFERENCE_COUNTING */
1415#endif /* DUK_USE_ASSERTIONS */
1416
1417 /*
1418 * Reset trigger counter
1419 */
1420
1421#ifdef DUK_USE_VOLUNTARY_GC
1422 tmp = (count_keep_obj + count_keep_str) / 256;
1426 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, trigger reset to %ld",
1427 (long) count_keep_obj, (long) count_keep_str, (long) heap->mark_and_sweep_trigger_counter));
1428#else
1429 DUK_D(DUK_DPRINT("garbage collect (mark-and-sweep) finished: %ld objects kept, %ld strings kept, no voluntary trigger",
1430 (long) count_keep_obj, (long) count_keep_str));
1431#endif
1432
1433 return 0; /* OK */
1434}
1435
1436#else /* DUK_USE_MARK_AND_SWEEP */
1437
1438/* no mark-and-sweep gc */
1439
1440#endif /* DUK_USE_MARK_AND_SWEEP */
unsigned int duk_small_uint_t
duk_small_int_t duk_ret_t
#define DUK_USE_MARK_AND_SWEEP_RECLIMIT
duk_int_fast32_t duk_int_t
duk_small_int_t duk_bool_t
duk_uint32_t duk_uint_fast32_t
#define DUK_MS_FLAG_EMERGENCY
#define DUK_HEAPHDR_CLEAR_FINALIZED(h)
#define DUK_MS_FLAG_NO_OBJECT_COMPACTION
#define DUK_STRTAB_DELETED_MARKER(heap)
#define DUK_HEAPHDR_SET_REACHABLE(h)
#define DUK_HOBJECT_A_GET_VALUE_PTR(heap, h, i)
#define DUK_HCOMPILEDFUNCTION_GET_CONSTS_BASE(heap, h)
#define DUK_HEAPHDR_SET_TEMPROOT(h)
#define DUK_HEAP_HAS_MARKANDSWEEP_RECLIMIT_REACHED(heap)
#define DUK_HEAPHDR_GET_TYPE(h)
#define DUK_HOBJECT_GET_PROTOTYPE(heap, h)
#define DUK_HEAP_CLEAR_MARKANDSWEEP_RECLIMIT_REACHED(heap)
#define DUK_HOBJECT_P_COMPUTE_SIZE(n_ent, n_arr, n_hash)
#define DUK_HEAPHDR_SET_NEXT(heap, h, val)
#define DUK_HEAP_SET_MARKANDSWEEP_RUNNING(heap)
#define DUK_HOBJECT_GET_ENEXT(h)
#define DUK_HEAPHDR_SET_PREV(heap, h, val)
#define DUK_HEAPHDR_SET_FINALIZABLE(h)
#define DUK_HOBJECT_IS_COMPILEDFUNCTION(h)
#define DUK_HEAPHDR_CLEAR_FINALIZABLE(h)
#define DUK_HEAP_INSERT_INTO_HEAP_ALLOCATED(heap, hdr)
#define DUK_HEAP_CLEAR_MARKANDSWEEP_RUNNING(heap)
DUK_EXTERNAL void duk_require_stack(duk_context *ctx, duk_idx_t extra)
#define DUK_HCOMPILEDFUNCTION_GET_FUNCS_BASE(heap, h)
#define DUK_HOBJECT_E_GET_KEY(heap, h, i)
#define DUK_HOBJECT_GET_ASIZE(h)
DUK_INTERNAL_DECL void duk_hobject_compact_props(duk_hthread *thr, duk_hobject *obj)
#define DUK_FREE(heap, ptr)
#define DUK_HEAPHDR_HAS_TEMPROOT(h)
DUK_INTERNAL_DECL void duk_free_hstring_inner(duk_heap *heap, duk_hstring *h)
DUK_EXTERNAL duk_idx_t duk_push_c_function(duk_context *ctx, duk_c_function func, duk_int_t nargs)
#define DUK_MS_FLAG_NO_FINALIZERS
#define DUK_HEAPHDR_HAS_REACHABLE(h)
#define DUK_HEAP_MARK_AND_SWEEP_TRIGGER_ADD
#define DUK_HEAPHDR_GET_REFCOUNT(h)
#define DUK_HOBJECT_IS_THREAD(h)
#define DUK_HOBJECT_IS_NATIVEFUNCTION(h)
#define DUK_HCOMPILEDFUNCTION_GET_FUNCS_END(heap, h)
#define DUK_HOBJECT_GET_HSIZE(h)
#define DUK_HEAPHDR_HAS_READONLY(h)
#define DUK_HOBJECT_IS_BUFFEROBJECT(h)
#define DUK_STRTAB_CHAIN_SIZE
#define DUK_HEAP_SET_MARKANDSWEEP_RECLIMIT_REACHED(heap)
DUK_INTERNAL_DECL duk_bool_t duk_hobject_hasprop_raw(duk_hthread *thr, duk_hobject *obj, duk_hstring *key)
#define DUK_HOBJECT_E_SLOT_IS_ACCESSOR(heap, h, i)
#define DUK_HEAP_HAS_MARKANDSWEEP_RUNNING(heap)
#define DUK_TVAL_IS_HEAP_ALLOCATED(tv)
#define DUK_MS_FLAG_NO_STRINGTABLE_RESIZE
#define DUK_HOBJECT_E_GET_VALUE_PTR(heap, h, i)
DUK_EXTERNAL duk_int_t duk_safe_call(duk_context *ctx, duk_safe_call_function func, duk_idx_t nargs, duk_idx_t nrets)
#define DUK_HEAPHDR_HAS_FINALIZED(h)
#define DUK_HEAP_GET_STRING(heap, idx)
DUK_INTERNAL_DECL void duk_heaphdr_refcount_finalize(duk_hthread *thr, duk_heaphdr *hdr)
#define DUK_HEAPHDR_CLEAR_REACHABLE(h)
#define DUK_HCOMPILEDFUNCTION_GET_DATA(heap, h)
#define DUK_HEAP_MARK_AND_SWEEP_TRIGGER_SKIP
#define DUK_ACT_GET_FUNC(act)
DUK_EXTERNAL void duk_pop(duk_context *ctx)
DUK_INTERNAL_DECL duk_hobject * duk_get_hobject(duk_context *ctx, duk_idx_t index)
DUK_INTERNAL_DECL void duk_push_hobject(duk_context *ctx, duk_hobject *h)
#define DUK_TVAL_GET_HEAPHDR(tv)
DUK_INTERNAL_DECL void duk_heap_strcache_string_remove(duk_heap *heap, duk_hstring *h)
#define DUK_CALLSTACK_GROW_STEP
#define DUK_HOBJECT_GET_ESIZE(h)
#define DUK_HEAPHDR_HAS_FINALIZABLE(h)
DUK_INTERNAL_DECL void duk_heap_force_strtab_resize(duk_heap *heap)
#define DUK_HTHREAD_STRING_INT_FINALIZER(thr)
#define DUK_HEAPHDR_GET_NEXT(heap, h)
#define DUK_HEAP_MARK_AND_SWEEP_TRIGGER_MULT
DUK_INTERNAL_DECL void duk_hobject_run_finalizer(duk_hthread *thr, duk_hobject *obj)
DUK_EXTERNAL duk_int_t duk_pcall(duk_context *ctx, duk_idx_t nargs)
#define DUK_MS_FLAG_SKIP_FINALIZERS
#define DUK_HCOMPILEDFUNCTION_GET_CONSTS_END(heap, h)
#define DUK_ASSERT_HEAPHDR_LINKS(heap, h)
DUK_INTERNAL_DECL void duk_heap_free_heaphdr_raw(duk_heap *heap, duk_heaphdr *hdr)
#define DUK_HEAPHDR_CLEAR_TEMPROOT(h)
DUK_LOCAL void duk__clear_refzero_list_flags(duk_heap *heap)
DUK_LOCAL void duk__mark_temproots_by_heap_scan(duk_heap *heap)
DUK_LOCAL_DECL void duk__mark_heaphdr(duk_heap *heap, duk_heaphdr *h)
DUK_LOCAL void duk__handle_temproot(duk_heap *heap, duk_heaphdr *hdr)
DUK_LOCAL void duk__mark_hstring(duk_heap *heap, duk_hstring *h)
DUK_LOCAL void duk__finalize_refcounts(duk_heap *heap)
DUK_LOCAL void duk__sweep_heap(duk_heap *heap, duk_int_t flags, duk_size_t *out_count_keep)
DUK_LOCAL int duk__protected_compact_object(duk_context *ctx)
DUK_LOCAL void duk__run_object_finalizers(duk_heap *heap, duk_small_uint_t flags)
DUK_INTERNAL duk_bool_t duk_heap_mark_and_sweep(duk_heap *heap, duk_small_uint_t flags)
DUK_LOCAL void duk__mark_roots_heap(duk_heap *heap)
DUK_LOCAL void duk__compact_object_list(duk_heap *heap, duk_hthread *thr, duk_heaphdr *start)
DUK_LOCAL void duk__mark_finalizable(duk_heap *heap)
DUK_LOCAL void duk__mark_finalize_list(duk_heap *heap)
DUK_LOCAL void duk__mark_refzero_list(duk_heap *heap)
DUK_LOCAL void duk__mark_hobject(duk_heap *heap, duk_hobject *h)
DUK_LOCAL_DECL void duk__mark_tval(duk_heap *heap, duk_tval *tv)
DUK_LOCAL void duk__clear_finalize_list_flags(duk_heap *heap)
DUK_LOCAL void duk__compact_objects(duk_heap *heap)
DUK_LOCAL void duk__sweep_stringtable_probe(duk_heap *heap, duk_size_t *out_count_keep)
DUK_LOCAL duk_hthread * duk__get_temp_hthread(duk_heap *heap)
#define NULL
Definition gmacros.h:924
#define next(ls)
duk_small_uint_t mark_and_sweep_base_flags
duk_hobject * builtins[DUK_NUM_BUILTINS]
duk_hstring * str
duk_size_t listlen
duk_hstring ** strlist
union duk_strtab_entry::@21 u