2
* Copyright (c) 1991-1994 by Xerox Corporation. All rights reserved.
3
* Copyright (c) 1999-2000 by Hewlett-Packard Company. All rights reserved.
5
* THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
6
* OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
8
* Permission is hereby granted to use or copy this program
9
* for any purpose, provided the above notices are retained on all copies.
10
* Permission to modify the code and to distribute modified code is granted,
11
* provided the above notices are retained, and a notice that the code was
12
* modified is included with the above copyright notice.
18
* Some simple primitives for allocation with explicit type information.
19
* Simple objects are allocated such that they contain a GC_descr at the
20
* end (in the last allocated word). This descriptor may be a procedure
21
* which then examines an extended descriptor passed as its environment.
23
* Arrays are treated as simple objects if they have sufficiently simple
24
* structure. Otherwise they are allocated from an array kind that supplies
25
* a special mark procedure. These arrays contain a pointer to a
26
* complex_descriptor as their last word.
27
* This is done because the environment field is too small, and the collector
28
* must trace the complex_descriptor.
30
* Note that descriptors inside objects may appear cleared, if we encounter a
31
* false reference to an object on a free list. In the GC_descr case, this
32
* is OK, since a 0 descriptor corresponds to examining no fields.
33
* In the complex_descriptor case, we explicitly check for that case.
35
* MAJOR PARTS OF THIS CODE HAVE NOT BEEN TESTED AT ALL and are not testable,
36
* since they are not accessible through the current interface.
39
#include "private/gc_pmark.h"
42
# define TYPD_EXTRA_BYTES (sizeof(word) - EXTRA_BYTES)
44
/* TRUE once GC_init_explicit_typing() has run; checked (and lazily     */
/* triggered) by GC_make_descriptor before any typed allocation.        */
GC_bool GC_explicit_typing_initialized = FALSE;
46
int GC_explicit_kind; /* Object kind for objects with indirect */
47
/* (possibly extended) descriptors. */
49
int GC_array_kind; /* Object kind for objects with complex */
50
/* descriptors and GC_array_mark_proc. */
52
/* Extended descriptors. GC_typed_mark_proc understands these. */
53
/* These are used for simple objects that are larger than what */
54
/* can be described by a BITMAP_BITS sized bitmap. */
56
word ed_bitmap; /* lsb corresponds to first word. */
57
GC_bool ed_continued; /* next entry is continuation. */
60
/* Array descriptors. GC_array_mark_proc understands these. */
61
/* We may eventually need to add provisions for headers and */
62
/* trailers. Hence we provide for tree structured descriptors, */
63
/* though we don't really use them currently. */
64
typedef union ComplexDescriptor {
65
struct LeafDescriptor { /* Describes simple array */
68
word ld_size; /* bytes per element */
69
/* multiple of ALIGNMENT */
70
word ld_nelements; /* Number of elements. */
71
GC_descr ld_descriptor; /* A simple length, bitmap, */
72
/* or procedure descriptor. */
74
struct ComplexArrayDescriptor {
78
union ComplexDescriptor * ad_element_descr;
80
struct SequenceDescriptor {
82
# define SEQUENCE_TAG 3
83
union ComplexDescriptor * sd_first;
84
union ComplexDescriptor * sd_second;
89
ext_descr * GC_ext_descriptors; /* Points to array of extended */
92
word GC_ed_size = 0; /* Current size of above arrays. */
93
/* Initial capacity (in ext_descr entries) of the GC_ext_descriptors   */
/* array. Note: the original definition carried a stray trailing       */
/* semicolon, which injected an extra `;` into every expansion;        */
/* harmless in the statement context used here, but removed so the     */
/* macro is also usable inside expressions.                            */
# define ED_INITIAL_SIZE 100
95
word GC_avail_descr = 0; /* Next available slot. */
97
int GC_typed_mark_proc_index; /* Indices of my mark */
98
int GC_array_mark_proc_index; /* procedures. */
100
/* Add a multiword bitmap to GC_ext_descriptors arrays. Return */
101
/* starting index. */
102
/* Returns -1 on failure. */
103
/* Caller does not hold allocation lock. */
104
signed_word GC_add_ext_descriptor(bm, nbits)
108
register size_t nwords = divWORDSZ(nbits + WORDSZ-1);
109
register signed_word result;
111
register word last_part;
112
register int extra_bits;
117
while (GC_avail_descr + nwords >= GC_ed_size) {
120
word ed_size = GC_ed_size;
125
new_size = ED_INITIAL_SIZE;
127
new_size = 2 * ed_size;
128
if (new_size > MAX_ENV) return(-1);
130
new = (ext_descr *) GC_malloc_atomic(new_size * sizeof(ext_descr));
131
if (new == 0) return(-1);
134
if (ed_size == GC_ed_size) {
135
if (GC_avail_descr != 0) {
136
BCOPY(GC_ext_descriptors, new,
137
GC_avail_descr * sizeof(ext_descr));
139
GC_ed_size = new_size;
140
GC_ext_descriptors = new;
141
} /* else another thread already resized it in the meantime */
143
result = GC_avail_descr;
144
for (i = 0; i < nwords-1; i++) {
145
GC_ext_descriptors[result + i].ed_bitmap = bm[i];
146
GC_ext_descriptors[result + i].ed_continued = TRUE;
149
/* Clear irrelevant bits. */
150
extra_bits = nwords * WORDSZ - nbits;
151
last_part <<= extra_bits;
152
last_part >>= extra_bits;
153
GC_ext_descriptors[result + i].ed_bitmap = last_part;
154
GC_ext_descriptors[result + i].ed_continued = FALSE;
155
GC_avail_descr += nwords;
161
/* Table of bitmap descriptors for n word long all pointer objects. */
162
GC_descr GC_bm_table[WORDSZ/2];
164
/* Return a descriptor for the concatenation of 2 nwords long objects, */
165
/* each of which is described by descriptor. */
166
/* The result is known to be short enough to fit into a bitmap */
168
/* Descriptor is a GC_DS_LENGTH or GC_DS_BITMAP descriptor. */
169
GC_descr GC_double_descr(descriptor, nwords)
170
register GC_descr descriptor;
171
register word nwords;
173
if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
174
descriptor = GC_bm_table[BYTES_TO_WORDS((word)descriptor)];
176
descriptor |= (descriptor & ~GC_DS_TAGS) >> nwords;
180
complex_descriptor * GC_make_sequence_descriptor();
182
/* Build a descriptor for an array with nelements elements, */
183
/* each of which can be described by a simple descriptor. */
184
/* We try to optimize some common cases. */
185
/* If the result is COMPLEX, then a complex_descr* is returned */
187
/* If the result is LEAF, then we built a LeafDescriptor in */
188
/* the structure pointed to by leaf. */
189
/* The tag in the leaf structure is not set. */
190
/* If the result is SIMPLE, then a GC_descr */
191
/* is returned in *simple_d. */
192
/* If the result is NO_MEM, then */
193
/* we failed to allocate the descriptor. */
194
/* The implementation knows that GC_DS_LENGTH is 0. */
195
/* *leaf, *complex_d, and *simple_d may be used as temporaries */
196
/* during the construction. */
201
int GC_make_array_descriptor(nelements, size, descriptor,
202
simple_d, complex_d, leaf)
207
complex_descriptor **complex_d;
208
struct LeafDescriptor * leaf;
210
# define OPT_THRESHOLD 50
211
/* For larger arrays, we try to combine descriptors of adjacent */
212
/* descriptors to speed up marking, and to reduce the amount */
213
/* of space needed on the mark stack. */
214
if ((descriptor & GC_DS_TAGS) == GC_DS_LENGTH) {
215
if ((word)descriptor == size) {
216
*simple_d = nelements * descriptor;
218
} else if ((word)descriptor == 0) {
219
*simple_d = (GC_descr)0;
223
if (nelements <= OPT_THRESHOLD) {
224
if (nelements <= 1) {
225
if (nelements == 1) {
226
*simple_d = descriptor;
229
*simple_d = (GC_descr)0;
233
} else if (size <= BITMAP_BITS/2
234
&& (descriptor & GC_DS_TAGS) != GC_DS_PROC
235
&& (size & (sizeof(word)-1)) == 0) {
237
GC_make_array_descriptor(nelements/2, 2*size,
238
GC_double_descr(descriptor,
239
BYTES_TO_WORDS(size)),
240
simple_d, complex_d, leaf);
241
if ((nelements & 1) == 0) {
244
struct LeafDescriptor * one_element =
245
(struct LeafDescriptor *)
246
GC_malloc_atomic(sizeof(struct LeafDescriptor));
248
if (result == NO_MEM || one_element == 0) return(NO_MEM);
249
one_element -> ld_tag = LEAF_TAG;
250
one_element -> ld_size = size;
251
one_element -> ld_nelements = 1;
252
one_element -> ld_descriptor = descriptor;
256
struct LeafDescriptor * beginning =
257
(struct LeafDescriptor *)
258
GC_malloc_atomic(sizeof(struct LeafDescriptor));
259
if (beginning == 0) return(NO_MEM);
260
beginning -> ld_tag = LEAF_TAG;
261
beginning -> ld_size = size;
262
beginning -> ld_nelements = 1;
263
beginning -> ld_descriptor = *simple_d;
264
*complex_d = GC_make_sequence_descriptor(
265
(complex_descriptor *)beginning,
266
(complex_descriptor *)one_element);
271
struct LeafDescriptor * beginning =
272
(struct LeafDescriptor *)
273
GC_malloc_atomic(sizeof(struct LeafDescriptor));
274
if (beginning == 0) return(NO_MEM);
275
beginning -> ld_tag = LEAF_TAG;
276
beginning -> ld_size = leaf -> ld_size;
277
beginning -> ld_nelements = leaf -> ld_nelements;
278
beginning -> ld_descriptor = leaf -> ld_descriptor;
279
*complex_d = GC_make_sequence_descriptor(
280
(complex_descriptor *)beginning,
281
(complex_descriptor *)one_element);
285
*complex_d = GC_make_sequence_descriptor(
287
(complex_descriptor *)one_element);
294
leaf -> ld_size = size;
295
leaf -> ld_nelements = nelements;
296
leaf -> ld_descriptor = descriptor;
301
complex_descriptor * GC_make_sequence_descriptor(first, second)
302
complex_descriptor * first;
303
complex_descriptor * second;
305
struct SequenceDescriptor * result =
306
(struct SequenceDescriptor *)
307
GC_malloc(sizeof(struct SequenceDescriptor));
308
/* Can't result in overly conservative marking, since tags are */
309
/* very small integers. Probably faster than maintaining type */
312
result -> sd_tag = SEQUENCE_TAG;
313
result -> sd_first = first;
314
result -> sd_second = second;
316
return((complex_descriptor *)result);
320
complex_descriptor * GC_make_complex_array_descriptor(nelements, descr)
322
complex_descriptor * descr;
324
struct ComplexArrayDescriptor * result =
325
(struct ComplexArrayDescriptor *)
326
GC_malloc(sizeof(struct ComplexArrayDescriptor));
329
result -> ad_tag = ARRAY_TAG;
330
result -> ad_nelements = nelements;
331
result -> ad_element_descr = descr;
333
return((complex_descriptor *)result);
337
ptr_t * GC_eobjfreelist;
339
ptr_t * GC_arobjfreelist;
341
mse * GC_typed_mark_proc GC_PROTO((register word * addr,
342
register mse * mark_stack_ptr,
343
mse * mark_stack_limit,
346
mse * GC_array_mark_proc GC_PROTO((register word * addr,
347
register mse * mark_stack_ptr,
348
mse * mark_stack_limit,
351
/* Caller does not hold allocation lock. */
352
void GC_init_explicit_typing()
359
if (sizeof(struct LeafDescriptor) % sizeof(word) != 0)
360
ABORT("Bad leaf descriptor size");
364
if (GC_explicit_typing_initialized) {
369
GC_explicit_typing_initialized = TRUE;
370
/* Set up object kind with simple indirect descriptor. */
371
GC_eobjfreelist = (ptr_t *)GC_new_free_list_inner();
372
GC_explicit_kind = GC_new_kind_inner(
373
(void **)GC_eobjfreelist,
374
(((word)WORDS_TO_BYTES(-1)) | GC_DS_PER_OBJECT),
376
/* Descriptors are in the last word of the object. */
377
GC_typed_mark_proc_index = GC_new_proc_inner(GC_typed_mark_proc);
378
/* Set up object kind with array descriptor. */
379
GC_arobjfreelist = (ptr_t *)GC_new_free_list_inner();
380
GC_array_mark_proc_index = GC_new_proc_inner(GC_array_mark_proc);
381
GC_array_kind = GC_new_kind_inner(
382
(void **)GC_arobjfreelist,
383
GC_MAKE_PROC(GC_array_mark_proc_index, 0),
385
for (i = 0; i < WORDSZ/2; i++) {
386
GC_descr d = (((word)(-1)) >> (WORDSZ - i)) << (WORDSZ - i);
394
# if defined(__STDC__) || defined(__cplusplus)
395
mse * GC_typed_mark_proc(register word * addr,
396
register mse * mark_stack_ptr,
397
mse * mark_stack_limit,
400
mse * GC_typed_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
401
register word * addr;
402
register mse * mark_stack_ptr;
403
mse * mark_stack_limit;
407
register word bm = GC_ext_descriptors[env].ed_bitmap;
408
register word * current_p = addr;
409
register word current;
410
register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
411
register ptr_t least_ha = GC_least_plausible_heap_addr;
413
for (; bm != 0; bm >>= 1, current_p++) {
415
current = *current_p;
416
FIXUP_POINTER(current);
417
if ((ptr_t)current >= least_ha && (ptr_t)current <= greatest_ha) {
418
PUSH_CONTENTS((ptr_t)current, mark_stack_ptr,
419
mark_stack_limit, current_p, exit1);
423
if (GC_ext_descriptors[env].ed_continued) {
424
/* Push an entry with the rest of the descriptor back onto the */
425
/* stack. Thus we never do too much work at once. Note that */
426
/* we also can't overflow the mark stack unless we actually */
427
/* mark something. */
429
if (mark_stack_ptr >= mark_stack_limit) {
430
mark_stack_ptr = GC_signal_mark_stack_overflow(mark_stack_ptr);
432
mark_stack_ptr -> mse_start = addr + WORDSZ;
433
mark_stack_ptr -> mse_descr =
434
GC_MAKE_PROC(GC_typed_mark_proc_index, env+1);
436
return(mark_stack_ptr);
439
/* Return the size of the object described by d. It would be faster to */
440
/* store this directly, or to compute it as part of */
441
/* GC_push_complex_descriptor, but hopefully it doesn't matter. */
442
word GC_descr_obj_size(d)
443
register complex_descriptor *d;
447
return(d -> ld.ld_nelements * d -> ld.ld_size);
449
return(d -> ad.ad_nelements
450
* GC_descr_obj_size(d -> ad.ad_element_descr));
452
return(GC_descr_obj_size(d -> sd.sd_first)
453
+ GC_descr_obj_size(d -> sd.sd_second));
455
ABORT("Bad complex descriptor");
456
/*NOTREACHED*/ return 0; /*NOTREACHED*/
460
/* Push descriptors for the object at addr with complex descriptor d */
461
/* onto the mark stack. Return 0 if the mark stack overflowed. */
462
mse * GC_push_complex_descriptor(addr, d, msp, msl)
464
register complex_descriptor *d;
468
register ptr_t current = (ptr_t) addr;
469
register word nelements;
476
register GC_descr descr = d -> ld.ld_descriptor;
478
nelements = d -> ld.ld_nelements;
479
if (msl - msp <= (ptrdiff_t)nelements) return(0);
480
sz = d -> ld.ld_size;
481
for (i = 0; i < nelements; i++) {
483
msp -> mse_start = (word *)current;
484
msp -> mse_descr = descr;
491
register complex_descriptor *descr = d -> ad.ad_element_descr;
493
nelements = d -> ad.ad_nelements;
494
sz = GC_descr_obj_size(descr);
495
for (i = 0; i < nelements; i++) {
496
msp = GC_push_complex_descriptor((word *)current, descr,
498
if (msp == 0) return(0);
505
sz = GC_descr_obj_size(d -> sd.sd_first);
506
msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_first,
508
if (msp == 0) return(0);
510
msp = GC_push_complex_descriptor((word *)current, d -> sd.sd_second,
515
ABORT("Bad complex descriptor");
516
/*NOTREACHED*/ return 0; /*NOTREACHED*/
521
# if defined(__STDC__) || defined(__cplusplus)
522
mse * GC_array_mark_proc(register word * addr,
523
register mse * mark_stack_ptr,
524
mse * mark_stack_limit,
527
mse * GC_array_mark_proc(addr, mark_stack_ptr, mark_stack_limit, env)
528
register word * addr;
529
register mse * mark_stack_ptr;
530
mse * mark_stack_limit;
534
register hdr * hhdr = HDR(addr);
535
register word sz = hhdr -> hb_sz;
536
register complex_descriptor * descr = (complex_descriptor *)(addr[sz-1]);
537
mse * orig_mark_stack_ptr = mark_stack_ptr;
538
mse * new_mark_stack_ptr;
541
/* Found a reference to a free list entry. Ignore it. */
542
return(orig_mark_stack_ptr);
544
/* In use counts were already updated when array descriptor was */
545
/* pushed. Here we only replace it by subobject descriptors, so */
546
/* no update is necessary. */
547
new_mark_stack_ptr = GC_push_complex_descriptor(addr, descr,
550
if (new_mark_stack_ptr == 0) {
551
/* Doesn't fit. Conservatively push the whole array as a unit */
552
/* and request a mark stack expansion. */
553
/* This cannot cause a mark stack overflow, since it replaces */
554
/* the original array entry. */
555
GC_mark_stack_too_small = TRUE;
556
new_mark_stack_ptr = orig_mark_stack_ptr + 1;
557
new_mark_stack_ptr -> mse_start = addr;
558
new_mark_stack_ptr -> mse_descr = WORDS_TO_BYTES(sz) | GC_DS_LENGTH;
560
/* Push descriptor itself */
561
new_mark_stack_ptr++;
562
new_mark_stack_ptr -> mse_start = addr + sz - 1;
563
new_mark_stack_ptr -> mse_descr = sizeof(word) | GC_DS_LENGTH;
565
return(new_mark_stack_ptr);
568
#if defined(__STDC__) || defined(__cplusplus)
569
GC_descr GC_make_descriptor(GC_bitmap bm, size_t len)
571
GC_descr GC_make_descriptor(bm, len)
576
register signed_word last_set_bit = len - 1;
577
register word result;
579
# define HIGH_BIT (((word)1) << (WORDSZ - 1))
581
if (!GC_explicit_typing_initialized) GC_init_explicit_typing();
582
while (last_set_bit >= 0 && !GC_get_bit(bm, last_set_bit)) last_set_bit --;
583
if (last_set_bit < 0) return(0 /* no pointers */);
584
# if ALIGNMENT == CPP_WORDSZ/8
586
register GC_bool all_bits_set = TRUE;
587
for (i = 0; i < last_set_bit; i++) {
588
if (!GC_get_bit(bm, i)) {
589
all_bits_set = FALSE;
594
/* An initial section contains all pointers. Use length descriptor. */
595
return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
599
if (last_set_bit < BITMAP_BITS) {
600
/* Hopefully the common case. */
601
/* Build bitmap descriptor (with bits reversed) */
603
for (i = last_set_bit - 1; i >= 0; i--) {
605
if (GC_get_bit(bm, i)) result |= HIGH_BIT;
607
result |= GC_DS_BITMAP;
612
index = GC_add_ext_descriptor(bm, (word)last_set_bit+1);
613
if (index == -1) return(WORDS_TO_BYTES(last_set_bit+1) | GC_DS_LENGTH);
614
/* Out of memory: use conservative */
616
result = GC_MAKE_PROC(GC_typed_mark_proc_index, (word)index);
621
ptr_t GC_clear_stack();
623
#define GENERAL_MALLOC(lb,k) \
624
(GC_PTR)GC_clear_stack(GC_generic_malloc((word)lb, k))
626
#define GENERAL_MALLOC_IOP(lb,k) \
627
(GC_PTR)GC_clear_stack(GC_generic_malloc_ignore_off_page(lb, k))
629
#if defined(__STDC__) || defined(__cplusplus)
630
void * GC_malloc_explicitly_typed(size_t lb, GC_descr d)
632
char * GC_malloc_explicitly_typed(lb, d)
638
register ptr_t * opp;
642
lb += TYPD_EXTRA_BYTES;
643
if( SMALL_OBJ(lb) ) {
645
lw = GC_size_map[lb];
647
lw = ALIGNED_WORDS(lb);
649
opp = &(GC_eobjfreelist[lw]);
651
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
653
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
654
if (0 == op) return 0;
656
lw = GC_size_map[lb]; /* May have been uninitialized. */
661
GC_words_allocd += lw;
665
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_explicit_kind);
667
lw = BYTES_TO_WORDS(GC_size(op));
670
((word *)op)[lw - 1] = d;
674
#if defined(__STDC__) || defined(__cplusplus)
675
void * GC_malloc_explicitly_typed_ignore_off_page(size_t lb, GC_descr d)
677
char * GC_malloc_explicitly_typed_ignore_off_page(lb, d)
683
register ptr_t * opp;
687
lb += TYPD_EXTRA_BYTES;
688
if( SMALL_OBJ(lb) ) {
690
lw = GC_size_map[lb];
692
lw = ALIGNED_WORDS(lb);
694
opp = &(GC_eobjfreelist[lw]);
696
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
698
op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
700
lw = GC_size_map[lb]; /* May have been uninitialized. */
705
GC_words_allocd += lw;
709
op = (ptr_t)GENERAL_MALLOC_IOP(lb, GC_explicit_kind);
711
lw = BYTES_TO_WORDS(GC_size(op));
714
((word *)op)[lw - 1] = d;
718
#if defined(__STDC__) || defined(__cplusplus)
719
void * GC_calloc_explicitly_typed(size_t n,
723
char * GC_calloc_explicitly_typed(n, lb, d)
730
register ptr_t * opp;
732
GC_descr simple_descr;
733
complex_descriptor *complex_descr;
734
register int descr_type;
735
struct LeafDescriptor leaf;
738
descr_type = GC_make_array_descriptor((word)n, (word)lb, d,
739
&simple_descr, &complex_descr, &leaf);
741
case NO_MEM: return(0);
742
case SIMPLE: return(GC_malloc_explicitly_typed(n*lb, simple_descr));
745
lb += sizeof(struct LeafDescriptor) + TYPD_EXTRA_BYTES;
749
lb += TYPD_EXTRA_BYTES;
752
if( SMALL_OBJ(lb) ) {
754
lw = GC_size_map[lb];
756
lw = ALIGNED_WORDS(lb);
758
opp = &(GC_arobjfreelist[lw]);
760
if( !FASTLOCK_SUCCEEDED() || (op = *opp) == 0 ) {
762
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
763
if (0 == op) return(0);
765
lw = GC_size_map[lb]; /* May have been uninitialized. */
770
GC_words_allocd += lw;
774
op = (ptr_t)GENERAL_MALLOC((word)lb, GC_array_kind);
775
if (0 == op) return(0);
776
lw = BYTES_TO_WORDS(GC_size(op));
778
if (descr_type == LEAF) {
779
/* Set up the descriptor inside the object itself. */
780
VOLATILE struct LeafDescriptor * lp =
781
(struct LeafDescriptor *)
783
+ lw - (BYTES_TO_WORDS(sizeof(struct LeafDescriptor)) + 1));
785
lp -> ld_tag = LEAF_TAG;
786
lp -> ld_size = leaf.ld_size;
787
lp -> ld_nelements = leaf.ld_nelements;
788
lp -> ld_descriptor = leaf.ld_descriptor;
789
((VOLATILE word *)op)[lw - 1] = (word)lp;
791
extern unsigned GC_finalization_failures;
792
unsigned ff = GC_finalization_failures;
794
((word *)op)[lw - 1] = (word)complex_descr;
795
/* Make sure the descriptor is cleared once there is any danger */
796
/* it may have been collected. */
798
GC_general_register_disappearing_link((GC_PTR *)
801
if (ff != GC_finalization_failures) {
802
/* Couldn't register it due to lack of memory. Punt. */
803
/* This will probably fail too, but gives the recovery code */
805
return(GC_malloc(n*lb));