/*
 * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef HeapInlines_h
#define HeapInlines_h

#include "CopyBarrier.h"
#include "Heap.h"
#include "JSCell.h"
#include "Structure.h"
#include <type_traits>
#include <wtf/Assertions.h>

namespace JSC {

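// Policy check used by allocation slow paths: a collection is warranted only when it
// is not deferred, the heap is in a safe state, no collection is already in progress,
// and the bytes allocated this cycle exceed the limit (Options::gcMaxHeapSize() if
// set, otherwise the eden budget).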
inline bool Heap::shouldCollect()
{
    if (isDeferred())
        return false;
    if (!m_isSafeToCollect)
        return false;
    if (m_operationInProgress != NoOperation)
        return false;
    if (Options::gcMaxHeapSize())
        return m_bytesAllocatedThisCycle > Options::gcMaxHeapSize();
    return m_bytesAllocatedThisCycle > m_maxEdenSize;
}

inline bool Heap::isBusy()
{
    return m_operationInProgress != NoOperation;
}

inline bool Heap::isCollecting()
{
    return m_operationInProgress == FullCollection || m_operationInProgress == EdenCollection;
}

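// Every cell lives in a MarkedBlock, and every MarkedBlock knows its owning Heap, so
// mapping a cell (or the cell payload of a JSValue) back to its Heap is just pointer
// masking plus a load. Non-cell JSValues belong to no heap.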
inline Heap* Heap::heap(const JSCell* cell)
{
    return MarkedBlock::blockFor(cell)->heap();
}

inline Heap* Heap::heap(const JSValue v)
{
    if (!v.isCell())
        return nullptr;
    return heap(v.asCell());
}

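// Mark-bit queries and updates, forwarded to the cell's MarkedBlock. Note that
// isLive() also considers cells newly allocated since the last collection, while
// isMarked() consults only the mark bit.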
inline bool Heap::isLive(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isLiveCell(cell);
}

inline bool Heap::isMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->isMarked(cell);
}

inline bool Heap::testAndSetMarked(const void* cell)
{
    return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}

inline void Heap::setMarked(const void* cell)
{
    MarkedBlock::blockFor(cell)->setMarked(cell);
}

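// Generational write barrier. If a black (already scanned) old-generation cell is made
// to point at a white (not yet marked) new cell, the old cell goes into the remembered
// set so an eden collection will re-scan it. Callers normally store through
// WriteBarrier<T> rather than invoking this directly; a minimal sketch of the manual
// pattern, assuming an owner cell and a newly stored value:
//
//     owner->m_field = value;             // the store
//     vm.heap.writeBarrier(owner, value); // then the barrier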
inline void Heap::writeBarrier(const JSCell* from, JSValue to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!to.isCell())
        return;
    writeBarrier(from, to.asCell());
}

inline void Heap::writeBarrier(const JSCell* from, JSCell* to)
{
#if ENABLE(WRITE_BARRIER_PROFILING)
    WriteBarrierCounters::countWriteBarrier();
#endif
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    if (!to || to->cellState() != CellState::NewWhite)
        return;
    addToRememberedSet(from);
}

inline void Heap::writeBarrier(const JSCell* from)
{
    ASSERT_GC_OBJECT_LOOKS_VALID(const_cast<JSCell*>(from));
    if (!from || from->cellState() != CellState::OldBlack)
        return;
    addToRememberedSet(from);
}

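// "Extra memory" is memory owned by a GC cell but allocated outside the GC heap (for
// example, a string's out-of-line character buffer). Reporting it feeds the
// collector's heap-growth heuristics; sizes at or below minExtraMemory are ignored.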
inline void Heap::reportExtraMemoryAllocated(size_t size)
{
    if (size > minExtraMemory)
        reportExtraMemoryAllocatedSlowCase(size);
}

inline void Heap::reportExtraMemoryVisited(CellState dataBeforeVisiting, size_t size)
{
    // We don't want to double-count the extra memory that was reported in previous collections.
    if (operationInProgress() == EdenCollection && dataBeforeVisiting == CellState::OldGrey)
        return;

    size_t* counter = &m_extraMemorySize;

    // Accumulate with a CAS loop, since multiple marking threads may race on the counter.
    for (;;) {
        size_t oldSize = *counter;
        if (WTF::weakCompareAndSwap(counter, oldSize, oldSize + size))
            return;
    }
}

inline void Heap::deprecatedReportExtraMemory(size_t size)
{
    if (size > minExtraMemory)
        deprecatedReportExtraMemorySlowCase(size);
}

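// The forEach* templates take the functor by reference so it can accumulate state
// across calls. forEachProtectedCell additionally requires the functor to expose a
// ReturnType typedef and a returnValue() accessor for handing results back.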
template<typename Functor> inline void Heap::forEachCodeBlock(Functor& functor)
{
    // We don't know the full set of CodeBlocks until compilation has terminated.
    completeAllDFGPlans();

    return m_codeBlocks.iterate<Functor>(functor);
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
{
    for (auto& pair : m_protectedValues)
        functor(pair.key);
    m_handleSet.forEachStrongHandle(functor, m_protectedValues);

    return functor.returnValue();
}

template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
{
    Functor functor;
    return forEachProtectedCell(functor);
}

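// Raw cell allocation. Cells whose destructors must run at sweep time come from the
// destructor space; everything else comes from the cheaper non-destructor space.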
inline void* Heap::allocateWithDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes with normal destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithDestructor(bytes);
}

inline void* Heap::allocateWithoutDestructor(size_t bytes)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes without destructor.\n", bytes);
#endif
    ASSERT(isValidAllocation(bytes));
    return m_objectSpace.allocateWithoutDestructor(bytes);
}

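// Chooses the destructor or non-destructor path from the static
// ClassType::needsDestruction flag, so the branch folds at compile time. A call site
// typically placement-news the cell, roughly like this (CellType, vm, and structure
// are placeholders):
//
//     void* cell = heap.allocateObjectOfType<CellType>(sizeof(CellType));
//     return new (NotNull, cell) CellType(vm, structure);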
template<typename ClassType>
void* Heap::allocateObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocateWithDestructor(bytes);
    return allocateWithoutDestructor(bytes);
}

template<typename ClassType>
MarkedSpace::Subspace& Heap::subspaceForObjectOfType()
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return subspaceForObjectDestructor();
    return subspaceForObjectWithoutDestructor();
}

template<typename ClassType>
MarkedAllocator& Heap::allocatorForObjectOfType(size_t bytes)
{
    // JSCell::classInfo() expects objects allocated with a normal destructor to derive from JSDestructibleObject.
    ASSERT((!ClassType::needsDestruction || (ClassType::StructureFlags & StructureIsImmortal) || std::is_convertible<ClassType, JSDestructibleObject>::value));

    if (ClassType::needsDestruction)
        return allocatorForObjectWithDestructor(bytes);
    return allocatorForObjectWithoutDestructor(bytes);
}

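// Backing-store allocation (for example, property storage) goes through the copied
// space rather than the marked space. These return CheckedBoolean so callers are
// forced to handle allocation failure.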
inline CheckedBoolean Heap::tryAllocateStorage(JSCell* intendedOwner, size_t bytes, void** outPtr)
{
    CheckedBoolean result = m_storageSpace.tryAllocate(bytes, outPtr);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC allocating %lu bytes of storage for %p: %p.\n", bytes, intendedOwner, *outPtr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline CheckedBoolean Heap::tryReallocateStorage(JSCell* intendedOwner, void** ptr, size_t oldSize, size_t newSize)
{
#if ENABLE(ALLOCATION_LOGGING)
    void* oldPtr = *ptr;
#endif
    CheckedBoolean result = m_storageSpace.tryReallocate(ptr, oldSize, newSize);
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC reallocating %lu -> %lu bytes of storage for %p: %p -> %p.\n", oldSize, newSize, intendedOwner, oldPtr, *ptr);
#else
    UNUSED_PARAM(intendedOwner);
#endif
    return result;
}

inline void Heap::ascribeOwner(JSCell* intendedOwner, void* storage)
{
#if ENABLE(ALLOCATION_LOGGING)
    dataLogF("JSC GC ascribing %p as owner of storage %p.\n", intendedOwner, storage);
#else
    UNUSED_PARAM(intendedOwner);
    UNUSED_PARAM(storage);
#endif
}

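// When CoreFoundation is in use, retained objects owned by dying cells are not
// released immediately; they are queued and released later, outside of collection,
// since releasing can run arbitrary code.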
#if USE(CF)
template <typename T>
inline void Heap::releaseSoon(RetainPtr<T>&& object)
{
    m_delayedReleaseObjects.append(WTFMove(object));
}
#endif

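// GC deferral is a counted nesting depth: every increment must be balanced by a
// decrement. Callers normally use an RAII helper instead of calling these directly; a
// minimal sketch, assuming a VM& vm is in scope (DeferGC is declared in DeferGC.h):
//
//     {
//         DeferGC deferGC(vm.heap); // incrementDeferralDepth()
//         // ... allocate freely; no collection can begin here ...
//     }                             // ~DeferGC() runs decrementDeferralDepthAndGCIfNeeded()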
inline void Heap::incrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth < 100); // Sanity check to make sure this doesn't get ridiculous.
    m_deferralDepth++;
}

inline void Heap::decrementDeferralDepth()
{
    RELEASE_ASSERT(m_deferralDepth >= 1);
    m_deferralDepth--;
}

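// Returns true only if a collection actually ran. Deferral is handled inside
// shouldCollect(): while deferred this simply returns false, and the pending
// collection is picked up when the deferral depth returns to zero.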
inline bool Heap::collectIfNecessaryOrDefer()
{
    if (!shouldCollect())
        return false;

    collect();
    return true;
}

inline void Heap::decrementDeferralDepthAndGCIfNeeded()
{
    decrementDeferralDepth();
    collectIfNecessaryOrDefer();
}

inline HashSet<MarkedArgumentBuffer*>& Heap::markListSet()
{
    // Allocated lazily, since most heaps never need it.
    if (!m_markListSet)
        m_markListSet = std::make_unique<HashSet<MarkedArgumentBuffer*>>();
    return *m_markListSet;
}

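// WeakGCMaps register themselves (keyed by address) so the collector can invoke their
// pruning callbacks during collection and drop entries whose keys have died.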
inline void Heap::registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback)
{
    m_weakGCMaps.add(weakGCMap, WTFMove(pruningCallback));
}

inline void Heap::unregisterWeakGCMap(void* weakGCMap)
{
    m_weakGCMaps.remove(weakGCMap);
}

inline void Heap::didAllocateBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated += capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

inline void Heap::didFreeBlock(size_t capacity)
{
#if ENABLE(RESOURCE_USAGE)
    m_blockBytesAllocated -= capacity;
#else
    UNUSED_PARAM(capacity);
#endif
}

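// Conservative-scan helper: decides whether an arbitrary bit pattern (typically found
// on the stack or in registers) points at a live GC cell. The checks run
// cheapest-first: TinyBloomFilter rule-out, atom alignment, membership in the marked
// block set, then per-cell liveness.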
inline bool Heap::isPointerGCObject(TinyBloomFilter filter, MarkedBlockSet& markedBlockSet, void* pointer)
{
    MarkedBlock* candidate = MarkedBlock::blockFor(pointer);
    // The filter can report false positives but never false negatives, so a rule-out is definitive.
    if (filter.ruleOut(bitwise_cast<Bits>(candidate))) {
        ASSERT(!candidate || !markedBlockSet.set().contains(candidate));
        return false;
    }

    if (!MarkedBlock::isAtomAligned(pointer))
        return false;

    if (!markedBlockSet.set().contains(candidate))
        return false;

    if (!candidate->isLiveCell(pointer))
        return false;

    return true;
}

inline bool Heap::isValueGCObject(TinyBloomFilter filter, MarkedBlockSet& markedBlockSet, JSValue value)
{
    if (!value.isCell())
        return false;
    return isPointerGCObject(filter, markedBlockSet, static_cast<void*>(value.asCell()));
}

} // namespace JSC

#endif // HeapInlines_h