/* Linkage helpers: GMP_EXTERN marks declarations shared across
   translation units; GMP_EXTERN_INLINE (overridable by the build)
   marks the generated gmp wrapper functions defined below. */
#define GMP_EXTERN extern

#ifndef GMP_EXTERN_INLINE
#define GMP_EXTERN_INLINE GMP_EXTERN __inline__
#endif /* NOTE(review): restored -- the guard was left unterminated by this
          extraction; the missing original line 6 almost certainly held the
          #endif. Confirm against the pristine source. */

/* Non-local return target and retry/GBC bookkeeping shared with the
   allocator (see the GMP_WRAPPERS comment below): gmp_jmp is the setjmp
   target armed around gmp calls; jmp_gmp counts retry passes (or is -1
   for in-place calls); gmp_relocatable gates the whole mechanism. */
GMP_EXTERN jmp_buf gmp_jmp;
GMP_EXTERN int jmp_gmp,gmp_relocatable;
/* Token-pasting helpers: join pastes its arguments verbatim; Join
   macro-expands its arguments first and then pastes (the usual
   two-level idiom). */
#define join(a_,b_) a_ ## b_
#define Join(a_,b_) join(a_,b_)

/* Pn: parameter lists for the generated wrappers -- n typed parameters
   named _b, _c, _d, _e in order, each Pn extending P(n-1). */
#define P1(bt_) bt_ _b
#define P2(bt_,ct_) P1(bt_),ct_ _c
#define P3(bt_,ct_,dt_) P2(bt_,ct_),dt_ _d
#define P4(bt_,ct_,dt_,et_) P3(bt_,ct_,dt_),et_ _e
/* E<n><s>: alias-detection predicates used by MEM_GMP_CALL to decide
   whether a call is in-place (a destination argument equals a source
   argument of an n-ary wrapper).
   FIXME: this is slightly excessively conservative, as it includes
   comparisons with possibly non-mpz_t-typed arguments. */
#define E21 _b==(void *)_c
#define E31 E21||_b==(void *)_d
#define E42 _b==_d||_b==_e||_c==_d||_c==_e
/* NOTE(review): orphaned macro-continuation fragment -- the opening of
 * the enclosing #define (original lines ~29-34) is missing from this
 * extraction.  Judging by the message text, it apparently signals a
 * Lisp-side error when the gmp retry longjmp loops too many times.
 * Recover the full definition from the pristine source; do not edit
 * this line in isolation. */
35
FEerror("gmp jmp loop in" #a_, 0);\
/* Return-type helper families used by MEM_GMP_CALL, keyed by a type
   tag <t>:
     RF_<t>  the wrapper's C return type
     RD_<t>  declaration of the result temporary GMP_TMP
     RA_<t>  assignment prefix capturing the gmp call's result
     RR_<t>  the expression the wrapper returns
   (GMP_TMP is presumably the wrapper-local temporary's name; its own
   definition, if any, is on lines missing from this extraction --
   TODO confirm against the pristine source.) */
#define RF_gmp_ulint unsigned long int
#define RD_gmp_ulint RF_gmp_ulint GMP_TMP
#define RA_gmp_ulint GMP_TMP =
#define RR_gmp_ulint GMP_TMP

#define RF_gmp_lint long int
#define RD_gmp_lint RF_gmp_lint GMP_TMP
#define RA_gmp_lint GMP_TMP =
#define RR_gmp_lint GMP_TMP

/* NOTE(review): RF_int restored -- RD_int below requires it and its
   definition sat on original line 50, missing from this extraction. */
#define RF_int int
#define RD_int RF_int GMP_TMP
#define RA_int GMP_TMP =
#define RR_int GMP_TMP

#define RF_gmp_char_star char *
#define RD_gmp_char_star RF_gmp_char_star GMP_TMP
#define RA_gmp_char_star GMP_TMP =
#define RR_gmp_char_star GMP_TMP

#define RF_double double
#define RD_double RF_double GMP_TMP
#define RA_double GMP_TMP =
#define RR_double GMP_TMP

#define RF_size_t size_t
#define RD_size_t RF_size_t GMP_TMP
#define RA_size_t GMP_TMP =
#define RR_size_t GMP_TMP
/* GMP_WRAPPERS: the gmp library uses heap allocation in places for
   temporary storage.  This greatly complicates relocatable bignum
   allocation in GCL, which is a big winner in terms of performance.
   The old procedure was to patch gmp to use alloca in such instances.
   Aside from possibly silently introducing bugs as gmp evolves, such
   a policy also runs the risk of colliding with gmp's stated policy
   of storing pointers in allocated blocks, a possibility GCL's
   conservative garbage collector is not designed to handle.  Here we
   implement a policy of preventing garbage collection inside of gmp
   calls in any case.  In case of non-in-place calls, where source and
   destination arguments are distinct, we simply longjmp back to the
   front of the call if a GBC would be needed and try the call again,
   as any previous partial write into the destination is of no
   consequence.  Just as is the case with the alloc_contblock and
   alloc_relblock algorithms themselves, on the second pass (as
   indicated by jmp_gmp) new pages are added if there is still not
   enough room in lieu of GBC.  In case of in-place calls, we schedule
   a GBC call after the gmp call completes, relying on the allocator
   to add pages immediately to the type to satisfy the allocation when
   necessary.  jmp_gmp counts the pass for non-in-place calls, and is
   set to -1 otherwise.  20040815 CM */
/* MEM_GMP_CALL(n_, rt_, a_, s_, b_...): defines GMP_EXTERN_INLINE
 * wrapper Join(m,a_) around gmp entry point a_, taking n_ parameters of
 * the listed types b_... and returning rt_ via the RF_/RA_/RR_ helper
 * families.  When gmp_relocatable is set, it arms setjmp(gmp_jmp) so
 * the allocator can longjmp back and retry the call instead of running
 * GBC under gmp; the Join(Join(E,n_),s_) alias test selects between
 * retry mode (jmp_gmp counting passes) and in-place mode (jmp_gmp=-1),
 * and jmp_gmp<-1 after the call triggers the deferred GBC.
 *
 * NOTE(review): several continuation lines of this macro are missing
 * from this extraction (declarations, brace closers, and bookkeeping
 * between the visible lines), and the interleaved bare numbers are
 * extraction residue.  Do not edit this definition without consulting
 * the pristine source. */
97
#define MEM_GMP_CALL(n_,rt_,a_,s_,b_...) \
98
GMP_EXTERN_INLINE Join(RF_,rt_) Join(m,a_)(Join(P,n_)(b_)) { \
101
if (gmp_relocatable) {\
103
if ((j=setjmp(gmp_jmp))) \
105
if (Join(Join(E,n_),s_)) jmp_gmp=-1 ; else jmp_gmp++;\
107
Join(RA_,rt_) a_(Join(A,n_));\
108
if (gmp_relocatable) {\
109
if (jmp_gmp<-1) GBC(-jmp_gmp);\
112
return Join(RR_,rt_);\
115
MEM_GMP_CALL(3,void,mpz_add,1,mpz_t,mpz_t,mpz_t)
116
MEM_GMP_CALL(3,void,mpz_add_ui,1,mpz_t,mpz_t,unsigned long int)
117
MEM_GMP_CALL(3,void,mpz_sub,1,mpz_t,mpz_t,mpz_t)
118
MEM_GMP_CALL(3,void,mpz_sub_ui,1,mpz_t,mpz_t,unsigned long int)
119
MEM_GMP_CALL(3,void,mpz_mul,1,mpz_t,mpz_t,mpz_t)
120
MEM_GMP_CALL(3,void,mpz_mul_si,1,mpz_t,mpz_t,long int)
121
MEM_GMP_CALL(3,void,mpz_mul_2exp,1,mpz_t,mpz_t,unsigned long int)
122
MEM_GMP_CALL(2,void,mpz_neg,1,mpz_t,mpz_t)
123
MEM_GMP_CALL(4,void,mpz_tdiv_qr,2,mpz_t,mpz_t,mpz_t,mpz_t)
124
MEM_GMP_CALL(3,void,mpz_fdiv_q_2exp,1,mpz_t,mpz_t,unsigned long int)
125
MEM_GMP_CALL(2,int,mpz_cmp,0,mpz_t,mpz_t)
126
MEM_GMP_CALL(3,void,mpz_and,1,mpz_t,mpz_t,mpz_t)
127
MEM_GMP_CALL(3,void,mpz_xor,1,mpz_t,mpz_t,mpz_t)
128
MEM_GMP_CALL(3,void,mpz_ior,1,mpz_t,mpz_t,mpz_t)
129
MEM_GMP_CALL(2,void,mpz_com,1,mpz_t,mpz_t)
130
MEM_GMP_CALL(2,int,mpz_tstbit,0,mpz_t,unsigned long int)
131
MEM_GMP_CALL(1,void,mpz_init,1,mpz_t)
132
MEM_GMP_CALL(2,void,mpz_set,1,mpz_t,mpz_t)
133
MEM_GMP_CALL(2,void,mpz_set_ui,1,mpz_t,unsigned long int)
134
MEM_GMP_CALL(2,void,mpz_set_si,1,mpz_t,long int)
135
MEM_GMP_CALL(1,double,mpz_get_d,0,mpz_t)
136
MEM_GMP_CALL(1,gmp_lint,mpz_get_si,0,mpz_t)
137
MEM_GMP_CALL(3,gmp_char_star,mpz_get_str,0,char *,int,mpz_t)
138
MEM_GMP_CALL(1,int,mpz_fits_sint_p,0,mpz_t)
139
MEM_GMP_CALL(1,gmp_ulint,mpz_popcount,0,mpz_t)
140
/*MEM_GMP_CALL(2,void *,mpz_realloc,mpz_t,mp_size_t)*/
141
MEM_GMP_CALL(1,size_t,mpz_size,0,mpz_t)
142
MEM_GMP_CALL(2,size_t,mpz_sizeinbase,0,mpz_t,int)
/* FIXME: find a way to have this follow the convention in gmp.h */

/* Route the gmp.h entry-point names (__gmpz_*) to the memory-managed
   m__gmpz_* wrappers generated above, so client code written against
   the plain mpz_* API picks up the GBC-safe versions transparently. */
#define __gmpz_add m__gmpz_add
#define __gmpz_add_ui m__gmpz_add_ui
#define __gmpz_sub m__gmpz_sub
#define __gmpz_sub_ui m__gmpz_sub_ui
#define __gmpz_mul m__gmpz_mul
#define __gmpz_mul_si m__gmpz_mul_si
#define __gmpz_mul_2exp m__gmpz_mul_2exp
#define __gmpz_neg m__gmpz_neg
#define __gmpz_tdiv_qr m__gmpz_tdiv_qr
#define __gmpz_fdiv_q_2exp m__gmpz_fdiv_q_2exp
#define __gmpz_cmp m__gmpz_cmp
#define __gmpz_and m__gmpz_and
#define __gmpz_xor m__gmpz_xor
#define __gmpz_ior m__gmpz_ior
#define __gmpz_com m__gmpz_com
#define __gmpz_tstbit m__gmpz_tstbit
#define __gmpz_init m__gmpz_init
#define __gmpz_set m__gmpz_set
#define __gmpz_set_ui m__gmpz_set_ui
#define __gmpz_set_si m__gmpz_set_si
#define __gmpz_get_d m__gmpz_get_d
#define __gmpz_get_si m__gmpz_get_si
#define __gmpz_get_str m__gmpz_get_str
#define __gmpz_fits_sint_p m__gmpz_fits_sint_p
#define __gmpz_popcount m__gmpz_popcount
/*#define __gmpz_realloc m__gmpz_realloc*/
#define __gmpz_size m__gmpz_size
#define __gmpz_sizeinbase m__gmpz_sizeinbase