#ifdef CONFIG_X86_64

/*
 * Legacy vsyscall page: fixed at -10 MB from the top of the address
 * space (0xffffffffff600000).
 */
#define VSYSCALL_ADDR (-10*1024*1024)

/*
 * VLOAD(x): load (file-image) address of vsyscall section x, expressed
 * relative to where __vsyscall_0 sits in the normal kernel image.
 */
#define VLOAD_OFFSET (VSYSCALL_ADDR - __vsyscall_0 + LOAD_OFFSET)
#define VLOAD(x) (ADDR(x) - VLOAD_OFFSET)

/*
 * VVIRT(x): user-visible virtual address of vsyscall section x inside
 * the fixed vsyscall page.
 */
#define VVIRT_OFFSET (VSYSCALL_ADDR - __vsyscall_0)
#define VVIRT(x) (ADDR(x) - VVIRT_OFFSET)
/*
 * Emit one vsyscall variable: place input section .vsyscall_var_<x>
 * at a fixed offset from .vsyscall_0 and define symbol <x> at its
 * user-visible (VVIRT) address.  All lines must stay on one logical
 * line via backslash continuations — this is a cpp macro.
 */
#define EMIT_VVAR(x, offset) .vsyscall_var_ ## x	\
	ADDR(.vsyscall_0) + offset			\
	: AT(VLOAD(.vsyscall_var_ ## x)) {		\
		*(.vsyscall_var_ ## x)			\
	}						\
	x = VVIRT(.vsyscall_var_ ## x);
/*
 * Fixed-address vsyscall page.  Each section is linked at its VVIRT
 * user address but loaded (AT) at its position in the kernel image.
 * NOTE(review): the input-section bodies and closing braces were lost
 * in this copy; restored to the canonical *(.section) form — confirm
 * against the upstream vmlinux.lds.S revision this came from.
 */
.vsyscall_0 : AT(VLOAD(.vsyscall_0)) {
	*(.vsyscall_0)
}

. = ALIGN(L1_CACHE_BYTES);
.vsyscall_fn : AT(VLOAD(.vsyscall_fn)) {
	*(.vsyscall_fn)
}

/* The remaining vsyscall slots sit at fixed 1 KB offsets in the page. */
.vsyscall_1 ADDR(.vsyscall_0) + 1024: AT(VLOAD(.vsyscall_1)) {
	*(.vsyscall_1)
}
.vsyscall_2 ADDR(.vsyscall_0) + 2048: AT(VLOAD(.vsyscall_2)) {
	*(.vsyscall_2)
}
.vsyscall_3 ADDR(.vsyscall_0) + 3072: AT(VLOAD(.vsyscall_3)) {
	*(.vsyscall_3)
}
. = ALIGN(PAGE_SIZE);
/*
 * NOTE(review): __vvar_page was missing from this copy but is
 * referenced by the ALIGN() at the end of this block — restored here;
 * confirm against upstream.
 */
__vvar_page = .;

/* Kernel-data page exported read-only to userspace (vvar data). */
.vvar : AT(ADDR(.vvar) - LOAD_OFFSET) {
	/* work around gold bug 13023 */
	__vvar_beginning_hack = .;

	/* Place all vvars at the offsets in asm/vvar.h. */
#define EMIT_VVAR(name, offset)			\
	. = __vvar_beginning_hack + offset;	\
	*(.vvar_ ## name)
#define __VVAR_KERNEL_LDS
#include <asm/vvar.h>
#undef __VVAR_KERNEL_LDS
#undef EMIT_VVAR	/* do not leak this definition past the section */
}

/* Advance past the end of the vsyscall page. */
. = __vsyscall_0 + PAGE_SIZE;

. = ALIGN(__vvar_page + PAGE_SIZE, PAGE_SIZE);
#endif /* CONFIG_X86_64 */
/* Init code and data - will be freed after init */