/*
 * Copyright (C) 2016-2019 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
 * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
 * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
 * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#include "config.h"
#include "PreciseAllocation.h"

#include "AlignedMemoryAllocator.h"
#include "IsoCellSetInlines.h"
#include "JSCInlines.h"
#include "Operations.h"
#include "SubspaceInlines.h"
38
static inline bool isAlignedForPreciseAllocation(void* memory)
40
uintptr_t allocatedPointer = bitwise_cast<uintptr_t>(memory);
41
return !(allocatedPointer & (PreciseAllocation::alignment - 1));
44
PreciseAllocation* PreciseAllocation::tryCreate(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace)
46
if (validateDFGDoesGC)
47
RELEASE_ASSERT(heap.expectDoesGC());
49
size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
50
static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
52
// We must use tryAllocateMemory instead of tryAllocateAlignedMemory since we want to use "realloc" feature.
53
void* space = subspace->alignedMemoryAllocator()->tryAllocateMemory(adjustedAlignmentAllocationSize);
57
bool adjustedAlignment = false;
58
if (!isAlignedForPreciseAllocation(space)) {
59
space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
60
adjustedAlignment = true;
61
ASSERT(isAlignedForPreciseAllocation(space));
64
if (scribbleFreeCells())
65
scribble(space, size);
66
return new (NotNull, space) PreciseAllocation(heap, size, subspace, indexInSpace, adjustedAlignment);
69
PreciseAllocation* PreciseAllocation::tryReallocate(size_t size, Subspace* subspace)
71
ASSERT(!isLowerTier());
72
size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
73
static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
75
ASSERT(subspace == m_subspace);
77
unsigned oldCellSize = m_cellSize;
78
bool oldAdjustedAlignment = m_adjustedAlignment;
79
void* oldBasePointer = basePointer();
81
void* newBasePointer = subspace->alignedMemoryAllocator()->tryReallocateMemory(oldBasePointer, adjustedAlignmentAllocationSize);
85
PreciseAllocation* newAllocation = bitwise_cast<PreciseAllocation*>(newBasePointer);
86
bool newAdjustedAlignment = false;
87
if (!isAlignedForPreciseAllocation(newBasePointer)) {
88
newAdjustedAlignment = true;
89
newAllocation = bitwise_cast<PreciseAllocation*>(bitwise_cast<uintptr_t>(newBasePointer) + halfAlignment);
90
ASSERT(isAlignedForPreciseAllocation(static_cast<void*>(newAllocation)));
93
// We have 4 patterns.
94
// oldAdjustedAlignment = true newAdjustedAlignment = true => Do nothing.
95
// oldAdjustedAlignment = true newAdjustedAlignment = false => Shift forward by halfAlignment
96
// oldAdjustedAlignment = false newAdjustedAlignment = true => Shift backward by halfAlignment
97
// oldAdjustedAlignment = false newAdjustedAlignment = false => Do nothing.
99
if (oldAdjustedAlignment != newAdjustedAlignment) {
100
if (oldAdjustedAlignment) {
101
ASSERT(!newAdjustedAlignment);
102
ASSERT(newAllocation == newBasePointer);
103
// Old [ 8 ][ content ]
104
// Now [ ][ content ]
105
// New [ content ]...
106
memmove(newBasePointer, bitwise_cast<char*>(newBasePointer) + halfAlignment, oldCellSize + PreciseAllocation::headerSize());
108
ASSERT(newAdjustedAlignment);
109
ASSERT(newAllocation != newBasePointer);
110
ASSERT(newAllocation == bitwise_cast<void*>(bitwise_cast<char*>(newBasePointer) + halfAlignment));
112
// Now [ content ][ ]
113
// New [ 8 ][ content ]
114
memmove(bitwise_cast<char*>(newBasePointer) + halfAlignment, newBasePointer, oldCellSize + PreciseAllocation::headerSize());
118
newAllocation->m_cellSize = size;
119
newAllocation->m_adjustedAlignment = newAdjustedAlignment;
120
return newAllocation;
124
PreciseAllocation* PreciseAllocation::createForLowerTier(Heap& heap, size_t size, Subspace* subspace, uint8_t lowerTierIndex)
126
if (validateDFGDoesGC)
127
RELEASE_ASSERT(heap.expectDoesGC());
129
size_t adjustedAlignmentAllocationSize = headerSize() + size + halfAlignment;
130
static_assert(halfAlignment == 8, "We assume that memory returned by malloc has alignment >= 8.");
132
void* space = subspace->alignedMemoryAllocator()->tryAllocateMemory(adjustedAlignmentAllocationSize);
133
RELEASE_ASSERT(space);
135
bool adjustedAlignment = false;
136
if (!isAlignedForPreciseAllocation(space)) {
137
space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(space) + halfAlignment);
138
adjustedAlignment = true;
139
ASSERT(isAlignedForPreciseAllocation(space));
142
if (scribbleFreeCells())
143
scribble(space, size);
144
PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
145
preciseAllocation->m_lowerTierIndex = lowerTierIndex;
146
return preciseAllocation;
149
PreciseAllocation* PreciseAllocation::reuseForLowerTier()
151
Heap& heap = *this->heap();
152
size_t size = m_cellSize;
153
Subspace* subspace = m_subspace;
154
bool adjustedAlignment = m_adjustedAlignment;
155
uint8_t lowerTierIndex = m_lowerTierIndex;
156
void* basePointer = this->basePointer();
158
this->~PreciseAllocation();
160
void* space = basePointer;
161
ASSERT((!isAlignedForPreciseAllocation(basePointer)) == adjustedAlignment);
162
if (adjustedAlignment)
163
space = bitwise_cast<void*>(bitwise_cast<uintptr_t>(basePointer) + halfAlignment);
165
PreciseAllocation* preciseAllocation = new (NotNull, space) PreciseAllocation(heap, size, subspace, 0, adjustedAlignment);
166
preciseAllocation->m_lowerTierIndex = lowerTierIndex;
167
preciseAllocation->m_hasValidCell = false;
168
return preciseAllocation;
171
PreciseAllocation::PreciseAllocation(Heap& heap, size_t size, Subspace* subspace, unsigned indexInSpace, bool adjustedAlignment)
172
: m_indexInSpace(indexInSpace)
174
, m_isNewlyAllocated(true)
175
, m_hasValidCell(true)
176
, m_adjustedAlignment(adjustedAlignment)
177
, m_attributes(subspace->attributes())
178
, m_subspace(subspace)
179
, m_weakSet(heap.vm())
182
ASSERT(cell()->isPreciseAllocation());
185
PreciseAllocation::~PreciseAllocation()
191
void PreciseAllocation::lastChanceToFinalize()
193
m_weakSet.lastChanceToFinalize();
195
clearNewlyAllocated();
199
void PreciseAllocation::shrink()
204
void PreciseAllocation::visitWeakSet(SlotVisitor& visitor)
206
m_weakSet.visit(visitor);
209
void PreciseAllocation::reapWeakSet()
211
return m_weakSet.reap();
214
void PreciseAllocation::flip()
216
ASSERT(heap()->collectionScope() == CollectionScope::Full);
220
bool PreciseAllocation::isEmpty()
222
return !isMarked() && m_weakSet.isEmpty() && !isNewlyAllocated();
225
void PreciseAllocation::sweep()
229
if (m_hasValidCell && !isLive()) {
230
if (m_attributes.destruction == NeedsDestruction)
231
m_subspace->destroy(vm(), static_cast<JSCell*>(cell()));
232
// We should clear IsoCellSet's bit before actually destroying PreciseAllocation
233
// since PreciseAllocation's destruction can be delayed until its WeakSet is cleared.
235
static_cast<IsoSubspace*>(m_subspace)->clearIsoCellSetBit(this);
236
m_hasValidCell = false;
240
void PreciseAllocation::destroy()
242
AlignedMemoryAllocator* allocator = m_subspace->alignedMemoryAllocator();
243
void* basePointer = this->basePointer();
244
this->~PreciseAllocation();
245
allocator->freeMemory(basePointer);
248
void PreciseAllocation::dump(PrintStream& out) const
250
out.print(RawPointer(this), ":(cell at ", RawPointer(cell()), " with size ", m_cellSize, " and attributes ", m_attributes, ")");
254
void PreciseAllocation::assertValidCell(VM& vm, HeapCell* cell) const
256
ASSERT(&vm == &this->vm());
257
ASSERT(cell == this->cell());
258
ASSERT(m_hasValidCell);