/*
 * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
 * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
 *
 * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
 * OR IMPLIED.  ANY USE IS AT YOUR OWN RISK.
 *
 * Permission is hereby granted to use or copy this program
 * for any purpose, provided the above notices are retained on all copies.
 * Permission to modify the code and to distribute modified code is granted,
 * provided the above notices are retained, and a notice that the code was
 * modified is included with the above copyright notice.
 */
/* Boehm, August 9, 1995 6:09 pm PDT */
# include "private/gc_priv.h"
/*
 * We maintain several hash tables of hblks that have had false hits.
 * Each contains one bit per hash bucket; If any page in the bucket
 * has had a false hit, we assume that all of them have.
 * See the definition of page_hash_table in gc_private.h.
 * False hits from the stack(s) are much more dangerous than false hits
 * from elsewhere, since the former can pin a large object that spans the
 * block, even though it does not start on the dangerous block.
 *
 * Externally callable routines are:
 *
 * GC_add_to_black_list_normal
 * GC_add_to_black_list_stack
 * GC_promote_black_lists
 * GC_is_black_listed
 *
 * All require that the allocator lock is held.
 */
/* Pointers to individual tables. We replace one table by another by */
39
/* switching these pointers. */
40
word * GC_old_normal_bl;
41
/* Nonstack false references seen at last full */
43
word * GC_incomplete_normal_bl;
44
/* Nonstack false references seen since last */
45
/* full collection. */
46
word * GC_old_stack_bl;
47
word * GC_incomplete_stack_bl;
49
word GC_total_stack_black_listed;
51
word GC_black_list_spacing = MINHINCR*HBLKSIZE; /* Initial rough guess */
55
# if defined(__STDC__) || defined(__cplusplus)
56
void GC_default_print_heap_obj_proc(ptr_t p)
58
void GC_default_print_heap_obj_proc(p)
62
ptr_t base = GC_base(p);
64
GC_err_printf2("start: 0x%lx, appr. length: %ld", base, GC_size(base));
67
void (*GC_print_heap_obj) GC_PROTO((ptr_t p)) =
68
GC_default_print_heap_obj_proc;
70
void GC_print_source_ptr(p)
73
ptr_t base = GC_base(p);
76
GC_err_printf0("in register");
78
GC_err_printf0("in root set");
81
GC_err_printf0("in object at ");
82
(*GC_print_heap_obj)(base);
88
if (!GC_all_interior_pointers) {
89
GC_old_normal_bl = (word *)
90
GC_scratch_alloc((word)(sizeof (page_hash_table)));
91
GC_incomplete_normal_bl = (word *)GC_scratch_alloc
92
((word)(sizeof(page_hash_table)));
93
if (GC_old_normal_bl == 0 || GC_incomplete_normal_bl == 0) {
94
GC_err_printf0("Insufficient memory for black list\n");
97
GC_clear_bl(GC_old_normal_bl);
98
GC_clear_bl(GC_incomplete_normal_bl);
100
GC_old_stack_bl = (word *)GC_scratch_alloc((word)(sizeof(page_hash_table)));
101
GC_incomplete_stack_bl = (word *)GC_scratch_alloc
102
((word)(sizeof(page_hash_table)));
103
if (GC_old_stack_bl == 0 || GC_incomplete_stack_bl == 0) {
104
GC_err_printf0("Insufficient memory for black list\n");
107
GC_clear_bl(GC_old_stack_bl);
108
GC_clear_bl(GC_incomplete_stack_bl);
111
void GC_clear_bl(doomed)
114
BZERO(doomed, sizeof(page_hash_table));
117
void GC_copy_bl(old, new)
120
BCOPY(old, new, sizeof(page_hash_table));
123
static word total_stack_black_listed();
125
/* Signal the completion of a collection. Turn the incomplete black */
126
/* lists into new black lists, etc. */
127
void GC_promote_black_lists()
129
word * very_old_normal_bl = GC_old_normal_bl;
130
word * very_old_stack_bl = GC_old_stack_bl;
132
GC_old_normal_bl = GC_incomplete_normal_bl;
133
GC_old_stack_bl = GC_incomplete_stack_bl;
134
if (!GC_all_interior_pointers) {
135
GC_clear_bl(very_old_normal_bl);
137
GC_clear_bl(very_old_stack_bl);
138
GC_incomplete_normal_bl = very_old_normal_bl;
139
GC_incomplete_stack_bl = very_old_stack_bl;
140
GC_total_stack_black_listed = total_stack_black_listed();
142
GC_printf1("%ld bytes in heap blacklisted for interior pointers\n",
143
(unsigned long)GC_total_stack_black_listed);
145
if (GC_total_stack_black_listed != 0) {
146
GC_black_list_spacing =
147
HBLKSIZE*(GC_heapsize/GC_total_stack_black_listed);
149
if (GC_black_list_spacing < 3 * HBLKSIZE) {
150
GC_black_list_spacing = 3 * HBLKSIZE;
152
if (GC_black_list_spacing > MAXHINCR * HBLKSIZE) {
153
GC_black_list_spacing = MAXHINCR * HBLKSIZE;
154
/* Makes it easier to allocate really huge blocks, which otherwise */
155
/* may have problems with nonuniform blacklist distributions. */
156
/* This way we should always succeed immediately after growing the */
161
void GC_unpromote_black_lists()
163
if (!GC_all_interior_pointers) {
164
GC_copy_bl(GC_old_normal_bl, GC_incomplete_normal_bl);
166
GC_copy_bl(GC_old_stack_bl, GC_incomplete_stack_bl);
169
/* P is not a valid pointer reference, but it falls inside */
170
/* the plausible heap bounds. */
171
/* Add it to the normal incomplete black list if appropriate. */
172
#ifdef PRINT_BLACK_LIST
173
void GC_add_to_black_list_normal(p, source)
176
void GC_add_to_black_list_normal(p)
180
if (!(GC_modws_valid_offsets[p & (sizeof(word)-1)])) return;
182
register int index = PHT_HASH(p);
184
if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_normal_bl, index)) {
185
# ifdef PRINT_BLACK_LIST
186
if (!get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
188
"Black listing (normal) 0x%lx referenced from 0x%lx ",
189
(unsigned long) p, (unsigned long) source);
190
GC_print_source_ptr(source);
194
set_pht_entry_from_index(GC_incomplete_normal_bl, index);
195
} /* else this is probably just an interior pointer to an allocated */
196
/* object, and isn't worth black listing. */
200
/* And the same for false pointers from the stack. */
201
#ifdef PRINT_BLACK_LIST
202
void GC_add_to_black_list_stack(p, source)
205
void GC_add_to_black_list_stack(p)
209
register int index = PHT_HASH(p);
211
if (HDR(p) == 0 || get_pht_entry_from_index(GC_old_stack_bl, index)) {
212
# ifdef PRINT_BLACK_LIST
213
if (!get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
215
"Black listing (stack) 0x%lx referenced from 0x%lx ",
216
(unsigned long)p, (unsigned long)source);
217
GC_print_source_ptr(source);
221
set_pht_entry_from_index(GC_incomplete_stack_bl, index);
226
* Is the block starting at h of size len bytes black listed? If so,
227
* return the address of the next plausible r such that (r, len) might not
228
* be black listed. (R may not actually be in the heap. We guarantee only
229
* that every smaller value of r after h is also black listed.)
230
* If (h,len) is not black listed, return 0.
231
* Knows about the structure of the black list hash tables.
233
struct hblk * GC_is_black_listed(h, len)
237
register int index = PHT_HASH((word)h);
239
word nblocks = divHBLKSZ(len);
241
if (!GC_all_interior_pointers) {
242
if (get_pht_entry_from_index(GC_old_normal_bl, index)
243
|| get_pht_entry_from_index(GC_incomplete_normal_bl, index)) {
249
if (GC_old_stack_bl[divWORDSZ(index)] == 0
250
&& GC_incomplete_stack_bl[divWORDSZ(index)] == 0) {
252
i += WORDSZ - modWORDSZ(index);
254
if (get_pht_entry_from_index(GC_old_stack_bl, index)
255
|| get_pht_entry_from_index(GC_incomplete_stack_bl, index)) {
260
if (i >= nblocks) break;
261
index = PHT_HASH((word)(h+i));
267
/* Return the number of blacklisted blocks in a given range. */
268
/* Used only for statistical purposes. */
269
/* Looks only at the GC_incomplete_stack_bl. */
270
word GC_number_stack_black_listed(start, endp1)
271
struct hblk *start, *endp1;
273
register struct hblk * h;
276
for (h = start; h < endp1; h++) {
277
register int index = PHT_HASH((word)h);
279
if (get_pht_entry_from_index(GC_old_stack_bl, index)) result++;
285
/* Return the total number of (stack) black-listed bytes. */
286
static word total_stack_black_listed()
291
for (i = 0; i < GC_n_heap_sects; i++) {
292
struct hblk * start = (struct hblk *) GC_heap_sects[i].hs_start;
293
word len = (word) GC_heap_sects[i].hs_bytes;
294
struct hblk * endp1 = start + len/HBLKSIZE;
296
total += GC_number_stack_black_listed(start, endp1);
298
return(total * HBLKSIZE);