17
17
#include "private/gc_priv.h"
19
# ifdef STUBBORN_ALLOC
19
#if defined(MANUAL_VDB)
20
20
/* Stubborn object (hard to change, nearly immutable) allocation. */
22
extern ptr_t GC_clear_stack(); /* in misc.c, behaves like identity */
24
/* Allocate lb bytes of kind k, then run GC_clear_stack over the    */
/* result so stale pointers left on the stack by the allocation do  */
/* not keep garbage alive.                                          */
#define GENERAL_MALLOC(lb,k) \
    (GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
27
/* Data structure representing immutable objects that   */
/* are still being initialized.                         */
/* This is a bit baroque in order to avoid acquiring    */
/* the lock twice for a typical allocation.             */
32
GC_PTR * GC_changing_list_start;
34
void GC_push_stubborn_structures GC_PROTO((void))
36
GC_push_all((ptr_t)(&GC_changing_list_start),
37
(ptr_t)(&GC_changing_list_start) + sizeof(GC_PTR *));
41
VOLATILE GC_PTR * VOLATILE GC_changing_list_current;
43
GC_PTR * GC_changing_list_current;
45
/* Points at last added element. Also (ab)used for */
46
/* synchronization. Updates and reads are assumed atomic. */
48
GC_PTR * GC_changing_list_limit;
49
/* Points at the last word of the buffer, which is always 0 */
50
/* All entries in (GC_changing_list_current, */
51
/* GC_changing_list_limit] are 0 */
54
void GC_stubborn_init()
58
GC_changing_list_start = (GC_PTR *)
60
(word)(INIT_SIZE * sizeof(GC_PTR)),
62
BZERO(GC_changing_list_start,
63
INIT_SIZE * sizeof(GC_PTR));
64
if (GC_changing_list_start == 0) {
65
GC_err_printf0("Insufficient space to start up\n");
66
ABORT("GC_stubborn_init: put of space");
68
GC_changing_list_current = GC_changing_list_start;
69
GC_changing_list_limit = GC_changing_list_start + INIT_SIZE - 1;
70
* GC_changing_list_limit = 0;
73
/* Compact and possibly grow GC_uninit_list.  The old copy is           */
/* left alone.  Lock must be held.                                      */
/* When called GC_changing_list_current == GC_changing_list_limit       */
/* which is one past the current element.                               */
/* When we finish GC_changing_list_current again points one past last   */
/* element.                                                             */
/* Invariant while this is running: GC_changing_list_current            */
/* points at a word containing 0.                                       */
/* Returns FALSE on failure.                                            */
82
GC_bool GC_compact_changing_list()
84
register GC_PTR *p, *q;
85
register word count = 0;
86
word old_size = (char **)GC_changing_list_limit
87
- (char **)GC_changing_list_start+1;
88
/* The casts are needed as a workaround for an Amiga bug */
89
register word new_size = old_size;
92
for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
95
if (2 * count > old_size) new_size = 2 * count;
98
new_size * sizeof(GC_PTR), PTRFREE);
99
/* PTRFREE is a lie. But we don't want the collector to */
100
/* consider these. We do want the list itself to be */
102
if (new_list == 0) return(FALSE);
103
BZERO(new_list, new_size * sizeof(GC_PTR));
105
for (p = GC_changing_list_start; p < GC_changing_list_limit; p++) {
106
if (*p != 0) *q++ = *p;
108
GC_changing_list_start = new_list;
109
GC_changing_list_limit = new_list + new_size - 1;
110
GC_changing_list_current = q;
114
/* Add p to changing list. Clear p on failure. */
/* NOTE(review): this span is a corrupted extraction -- stray           */
/* line-number lines interrupt the backslash continuations and several  */
/* macro lines (braces, the list-full handling) are missing.  What      */
/* survives shows the macro recording p's page in GC_changed_pages and  */
/* appending p to the changing list, compacting -- and clearing p on    */
/* compaction failure -- when the list fills.  Restore the full macro   */
/* from pristine sources before compiling.                              */
115
# define ADD_CHANGING(p) \
117
register struct hblk * h = HBLKPTR(p); \
118
register word index = PHT_HASH(h); \
120
set_pht_entry_from_index(GC_changed_pages, index); \
122
if (*GC_changing_list_current != 0 \
123
&& ++GC_changing_list_current == GC_changing_list_limit) { \
124
if (!GC_compact_changing_list()) (p) = 0; \
126
*GC_changing_list_current = p;
128
/* NOTE(review): corrupted extraction.  The bodies of                   */
/* GC_change_stubborn and GC_end_stubborn_change are interleaved with   */
/* stray line-number lines and truncated: the K&R parameter             */
/* declarations, locking, the quick removal path, and most of the       */
/* backward-scan fallback are missing.  The surviving lines show the    */
/* fast path (last-added element matches p) and a backward scan of the  */
/* changing list that aborts if p is not found.  Restore from pristine  */
/* sources; do not attempt to compile as-is.                            */
void GC_change_stubborn(p)
140
void GC_end_stubborn_change(p)
144
register VOLATILE GC_PTR * my_current = GC_changing_list_current;
146
register GC_PTR * my_current = GC_changing_list_current;
148
register GC_bool tried_quick;
151
if (*my_current == p) {
152
/* Hopefully the normal case. */
153
/* Compaction could not have been running when we started. */
156
if (my_current == GC_changing_list_current) {
157
/* Compaction can't have run in the interim. */
158
/* We got away with the quick and dirty approach. */
170
my_current = GC_changing_list_current;
171
for (; my_current >= GC_changing_list_start; my_current--) {
172
if (*my_current == p) {
180
GC_err_printf1("Bad arg to GC_end_stubborn_change: 0x%lx\n",
182
ABORT("Bad arg to GC_end_stubborn_change");
188
/* Allocate lb bytes of composite (pointerful) data */
189
/* No pointer fields may be changed after a call to */
190
/* GC_end_stubborn_change(p) where p is the value */
191
/* returned by GC_malloc_stubborn. */
/* NOTE(review): corrupted extraction.  Both the ANSI and K&R           */
/* prototypes survive but the #ifdef __STDC__ scaffolding, local        */
/* declarations, locking, free-list pop, and the slow-path branch       */
/* structure are missing.  The surviving lines show a small-object      */
/* fast path (size-map lookup, GC_sobjfreelist pop under FASTLOCK,      */
/* words-allocated accounting) and a GC_generic_malloc(STUBBORN)        */
/* fallback, with ADD_CHANGING applied to the result in both paths.     */
/* Restore from pristine sources before compiling.                      */
193
GC_PTR GC_malloc_stubborn(size_t lb)
195
GC_PTR GC_malloc_stubborn(lb)
205
if( SMALL_OBJ(lb) ) {
207
lw = GC_size_map[lb];
209
lw = ALIGNED_WORDS(lb);
211
opp = &(GC_sobjfreelist[lw]);
213
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
215
result = GC_generic_malloc((word)lb, STUBBORN);
220
GC_words_allocd += lw;
221
result = (GC_PTR) op;
222
ADD_CHANGING(result);
224
return((GC_PTR)result);
227
GC_generic_malloc((word)lb, STUBBORN);
232
ADD_CHANGING(result);
235
return((GC_PTR)GC_clear_stack(result));
239
/* Functions analogous to GC_read_dirty and GC_page_was_dirty. */
240
/* Report pages on which stubborn objects were changed. */
/* NOTE(review): corrupted extraction.  The surviving lines show the    */
/* intent: snapshot GC_changed_pages into GC_prev_changed_pages, clear  */
/* the former, then re-seed it from the pages of objects still on the   */
/* changing list.  The loop body (h assignment, any mark check) is      */
/* missing, as are the braces.  Restore from pristine sources.          */
241
void GC_read_changed()
243
register GC_PTR * p = GC_changing_list_start;
245
register struct hblk * h;
248
if (p == 0) /* initializing */ return;
249
BCOPY(GC_changed_pages, GC_prev_changed_pages,
250
(sizeof GC_changed_pages));
251
BZERO(GC_changed_pages, (sizeof GC_changed_pages));
252
for (; p <= GC_changing_list_current; p++) {
256
set_pht_entry_from_index(GC_changed_pages, index);
261
GC_bool GC_page_was_changed(h)
264
register word index = PHT_HASH(h);
266
return(get_pht_entry_from_index(GC_prev_changed_pages, index));
269
/* Remove unreachable entries from changed list. Should only be */
270
/* called with mark bits consistent and lock held. */
/* NOTE(review): corrupted extraction.  The surviving lines show the    */
/* shape: walk the changing list up to GC_changing_list_current, drop   */
/* entries whose base object is unmarked (GC_base == 0 or               */
/* !GC_is_marked), and report counts via GC_printf2.  The loop body     */
/* that reads q, clears dropped slots, and bumps the counters is        */
/* missing.  Restore from pristine sources.                             */
271
void GC_clean_changing_list()
273
register GC_PTR * p = GC_changing_list_start;
276
register unsigned long count = 0;
277
register unsigned long dropped_count = 0;
279
if (p == 0) /* initializing */ return;
280
for (; p <= GC_changing_list_current; p++) {
283
r = (ptr_t)GC_base(q);
284
if (r == 0 || !GC_is_marked(r)) {
292
GC_printf2("%lu entries in changing list: reclaimed %lu\n",
293
(unsigned long)count, (unsigned long)dropped_count);
298
#else /* !STUBBORN_ALLOC */
/* NOTE(review): corrupted extraction of the no-op fallback stubs used  */
/* when STUBBORN_ALLOC is not defined.  The #ifdef __STDC__ selection   */
/* between the ANSI and K&R prototypes, the parameter declarations,     */
/* and the (empty) bodies of GC_end_stubborn_change,                    */
/* GC_change_stubborn, and GC_push_stubborn_structures are missing.     */
/* GC_malloc_stubborn here simply forwards to GC_malloc.  Restore from  */
/* pristine sources.                                                    */
301
GC_PTR GC_malloc_stubborn(size_t lb)
303
GC_PTR GC_malloc_stubborn(lb)
307
return(GC_malloc(lb));
311
void GC_end_stubborn_change(p)
317
void GC_change_stubborn(p)
322
void GC_push_stubborn_structures GC_PROTO((void))
21
/* This interface is deprecated.  We mostly emulate it using        */
/* MANUAL_VDB.  But that imposes the additional constraint that     */
/* written, but not yet GC_dirty()ed objects must be referenced     */
/* by a stack.                                                      */
25
/* Deprecated stubborn-allocation entry point: under MANUAL_VDB it  */
/* degenerates to a plain collectable allocation.                   */
/* NOTE(review): only the braces were lost in the extraction; they  */
/* are restored here.                                               */
void * GC_malloc_stubborn(size_t lb)
{
    return(GC_malloc(lb));
}
31
/* NOTE(review): the bodies of these two functions were lost in the     */
/* extraction.  Per the deprecation note above they emulate the         */
/* stubborn interface via MANUAL_VDB -- GC_end_stubborn_change          */
/* presumably marks p's page dirty (GC_dirty) and GC_change_stubborn is */
/* presumably a no-op -- confirm against pristine sources; do not       */
/* guess when restoring.                                                */
void GC_end_stubborn_change(void *p)
37
void GC_change_stubborn(void *p)
41
#else /* !MANUAL_VDB */
43
/* Without MANUAL_VDB the deprecated stubborn interface also        */
/* degenerates to a plain GC_malloc.                                */
/* NOTE(review): only the braces were lost in the extraction; they  */
/* are restored here.                                               */
void * GC_malloc_stubborn(size_t lb)
{
    return(GC_malloc(lb));
}
49
/* NOTE(review): the bodies of these two functions were lost in the     */
/* extraction.  In the !MANUAL_VDB build they are presumably empty      */
/* no-ops (no dirty-bit emulation is available) -- confirm against      */
/* pristine sources before restoring.                                   */
void GC_end_stubborn_change(void *p)
54
void GC_change_stubborn(void *p)
58
#endif /* !MANUAL_VDB */