761
761
/* Should be called without allocation lock. */
762
762
int GC_invoke_finalizers()
764
register struct finalizable_object * curr_fo;
765
register int count = 0;
764
struct finalizable_object * curr_fo;
766
word mem_freed_before;
768
769
while (GC_finalize_now != 0) {
801
810
/* Either invoke queued finalizers directly (default) or, when		*/
/* GC_finalize_on_demand is set, notify the client via			*/
/* GC_finalizer_notifier that finalizers are ready to run.		*/
/* NOTE(review): this block arrived garbled (interleaved line numbers,	*/
/* missing braces/#endifs, duplicated GC_ASSERT); reconstructed from	*/
/* the upstream gc6.x finalize.c — verify against the repository copy.	*/
void GC_notify_or_invoke_finalizers GC_PROTO((void))
{
    /* This is a convenient place to generate backtraces if appropriate, */
    /* since that code is not callable with the allocation lock.	 */
#   if defined(KEEP_BACK_PTRS) || defined(MAKE_BACK_GRAPH)
      static word last_back_trace_gc_no = 1;	/* Skip first one. */

      if (GC_gc_no > last_back_trace_gc_no) {
	word i;

#	ifdef KEEP_BACK_PTRS
	  LOCK();
	  /* Stops when GC_gc_no wraps; that's OK.	*/
	  last_back_trace_gc_no = (word)(-1);	/* disable others. */
	  for (i = 0; i < GC_backtraces; ++i) {
	      /* FIXME: This tolerates concurrent heap mutation,	*/
	      /* which may cause occasional mysterious results.		*/
	      /* We need to release the GC lock, since GC_print_callers	*/
	      /* acquires it.  It probably shouldn't.			*/
	      UNLOCK();
	      GC_generate_random_backtrace_no_gc();
	      LOCK();
	  }
	  last_back_trace_gc_no = GC_gc_no;
	  UNLOCK();
#	endif
#	ifdef MAKE_BACK_GRAPH
	  if (GC_print_back_height)
	    GC_print_back_graph_stats();
#	endif
      }
#   endif
    if (GC_finalize_now == 0) return;
    if (!GC_finalize_on_demand) {
	(void) GC_invoke_finalizers();
#	ifndef THREADS
	  GC_ASSERT(GC_finalize_now == 0);
#	endif	/* Otherwise GC can run concurrently and add more */
	return;
    }
    /* On-demand mode: tell the client once per collection cycle that	*/
    /* finalizers are pending, instead of running them ourselves.	*/
    /* NOTE(review): last_finalizer_notification is a file-scope		*/
    /* variable declared outside this chunk — confirm its name.		*/
    if (GC_finalizer_notifier != (void (*) GC_PROTO((void)))0
	&& last_finalizer_notification != GC_gc_no) {
	last_finalizer_notification = GC_gc_no;
	GC_finalizer_notifier();
    }
}
#if !defined(NO_DEBUGGING)
887
void GC_print_finalization_stats()
889
struct finalizable_object *fo = GC_finalize_now;
892
GC_printf2("%lu finalization table entries; %lu disappearing links\n",
893
GC_fo_entries, GC_dl_entries);
894
for (; 0 != fo; fo = fo_next(fo)) ++ready;
895
GC_printf1("%lu objects are eligible for immediate finalization\n", ready);
898
#endif /* NO_DEBUGGING */