/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003-2009, 2013-2015 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */

#ifndef Heap_h
#define Heap_h

#include "ArrayBuffer.h"
#include "CodeBlockSet.h"
#include "CopyVisitor.h"
#include "GCIncomingRefCountedSet.h"
#include "HandleSet.h"
#include "HandleStack.h"
#include "HeapObserver.h"
#include "HeapOperation.h"
#include "JITStubRoutineSet.h"
#include "ListableHandler.h"
#include "MachineStackMarker.h"
#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include "MarkedSpace.h"
#include "Options.h"
#include "SlotVisitor.h"
#include "StructureIDTable.h"
#include "TinyBloomFilter.h"
#include "UnconditionalFinalizer.h"
#include "WeakHandleOwner.h"
#include "WeakReferenceHarvester.h"
#include "WriteBarrierBuffer.h"
#include "WriteBarrierSupport.h"
#include <wtf/HashCountedSet.h>
#include <wtf/HashSet.h>
#include <wtf/ParallelHelperPool.h>

namespace JSC {

class CodeBlock;
class CopiedSpace;
class EdenGCActivityCallback;
class ExecutableBase;
class FullGCActivityCallback;
class GCActivityCallback;
class GCAwareJITStubRoutine;
class Heap;
class HeapRootVisitor;
class HeapVerifier;
class IncrementalSweeper;
class JITStubRoutine;
class JSCell;
class JSStack;
class JSValue;
class LLIntOffsetsExtractor;
class MarkedArgumentBuffer;
class VM;

namespace DFG {
class SpeculativeJIT;
class Worklist;
}

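// Dead cells are scribbled with this recognizable pattern by zombifyDeadObjects()
// so that use-after-free reads stand out in a debugger; Heap::isZombified() below
// tests for it.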
static void* const zombifiedBits = reinterpret_cast<void*>(static_cast<uintptr_t>(0xdeadbeef));

typedef HashCountedSet<JSCell*> ProtectCountSet;
typedef HashCountedSet<const char*> TypeCountSet;

enum HeapType { SmallHeap, LargeHeap };

class Heap {
    WTF_MAKE_NONCOPYABLE(Heap);
public:
    friend class JIT;
    friend class DFG::SpeculativeJIT;
    static Heap* heap(const JSValue); // 0 for immediate values
    static Heap* heap(const JSCell*);

    // This constant determines how many blocks we iterate between checks of our
    // deadline when calling Heap::isPagedOut. Decreasing it will cause us to detect
    // overstepping our deadline more quickly, while increasing it will cause
    // our scan to run faster.
    static const unsigned s_timeCheckResolution = 16;
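
    // A sketch of the kind of loop this constant throttles (illustrative only;
    // the deadline clock and the touch() helper are assumptions, not part of
    // this interface):
    //
    //     unsigned blocksScanned = 0;
    //     for (MarkedBlock* block : snapshot) {
    //         touch(block); // read a word from the block to test residency
    //         if (!(++blocksScanned % s_timeCheckResolution)
    //             && monotonicallyIncreasingTime() > deadline)
    //             return true; // overstepped the deadline: treat as paged out
    //     }
    //     return false;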

    static bool isLive(const void*);
    static bool isMarked(const void*);
    static bool testAndSetMarked(const void*);
    static void setMarked(const void*);

    // To be accurate, these functions must be run after stopAllocation() is
    // called and before liveness data is cleared.
    static bool isPointerGCObject(TinyBloomFilter, MarkedBlockSet&, void* pointer);
    static bool isValueGCObject(TinyBloomFilter, MarkedBlockSet&, JSValue);

    void writeBarrier(const JSCell*);
    void writeBarrier(const JSCell*, JSValue);
    void writeBarrier(const JSCell*, JSCell*);
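
    // Illustrative usage (a sketch, not part of this interface): after a raw
    // store of a cell or value into a marked object, notify the heap so
    // generational collection remains sound. WriteBarrier<T>::set normally
    // does this for you:
    //
    //     object->m_rawField = value; // m_rawField is hypothetical
    //     vm.heap.writeBarrier(object, value);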

    JS_EXPORT_PRIVATE static void* copyBarrier(const JSCell* owner, void*& copiedSpacePointer);

    WriteBarrierBuffer& writeBarrierBuffer() { return m_writeBarrierBuffer; }
    void flushWriteBarrierBuffer(JSCell*);

    Heap(VM*, HeapType);
    ~Heap();
    void lastChanceToFinalize();
    void releaseDelayedReleasedObjects();

    VM* vm() const { return m_vm; }
    MarkedSpace& objectSpace() { return m_objectSpace; }
    CopiedSpace& storageSpace() { return m_storageSpace; }
    MachineThreads& machineThreads() { return m_machineThreads; }

    const SlotVisitor& slotVisitor() const { return m_slotVisitor; }

    JS_EXPORT_PRIVATE GCActivityCallback* fullActivityCallback();
    JS_EXPORT_PRIVATE GCActivityCallback* edenActivityCallback();
    JS_EXPORT_PRIVATE void setFullActivityCallback(PassRefPtr<FullGCActivityCallback>);
    JS_EXPORT_PRIVATE void setEdenActivityCallback(PassRefPtr<EdenGCActivityCallback>);
    JS_EXPORT_PRIVATE void setGarbageCollectionTimerEnabled(bool);

    JS_EXPORT_PRIVATE IncrementalSweeper* sweeper();
    JS_EXPORT_PRIVATE void setIncrementalSweeper(std::unique_ptr<IncrementalSweeper>);

    void addObserver(HeapObserver* observer) { m_observers.append(observer); }
    void removeObserver(HeapObserver* observer) { m_observers.removeFirst(observer); }

    // true if collection is in progress
    bool isCollecting();
    HeapOperation operationInProgress() { return m_operationInProgress; }
    // true if an allocation or collection is in progress
    bool isBusy();
    MarkedSpace::Subspace& subspaceForObjectWithoutDestructor() { return m_objectSpace.subspaceForObjectsWithoutDestructor(); }
    MarkedSpace::Subspace& subspaceForObjectDestructor() { return m_objectSpace.subspaceForObjectsWithDestructor(); }
    template<typename ClassType> MarkedSpace::Subspace& subspaceForObjectOfType();
    MarkedAllocator& allocatorForObjectWithoutDestructor(size_t bytes) { return m_objectSpace.allocatorFor(bytes); }
    MarkedAllocator& allocatorForObjectWithDestructor(size_t bytes) { return m_objectSpace.destructorAllocatorFor(bytes); }
    template<typename ClassType> MarkedAllocator& allocatorForObjectOfType(size_t bytes);
    CopiedAllocator& storageAllocator() { return m_storageSpace.allocator(); }
    CheckedBoolean tryAllocateStorage(JSCell* intendedOwner, size_t, void**);
    CheckedBoolean tryReallocateStorage(JSCell* intendedOwner, void**, size_t, size_t);
    void ascribeOwner(JSCell* intendedOwner, void*);

    typedef void (*Finalizer)(JSCell*);
    JS_EXPORT_PRIVATE void addFinalizer(JSCell*, Finalizer);
    void addExecutable(ExecutableBase*);

    void notifyIsSafeToCollect() { m_isSafeToCollect = true; }
    bool isSafeToCollect() const { return m_isSafeToCollect; }

    JS_EXPORT_PRIVATE void collectAllGarbageIfNotDoneRecently();
    void collectAllGarbage() { collectAndSweep(FullCollection); }
    JS_EXPORT_PRIVATE void collectAndSweep(HeapOperation collectionType = AnyCollection);
    bool shouldCollect();
    JS_EXPORT_PRIVATE void collect(HeapOperation collectionType = AnyCollection);
    bool collectIfNecessaryOrDefer(); // Returns true if it did collect.

    void completeAllDFGPlans();

    // Use this API to report non-GC memory referenced by GC objects. Be sure to
    // call both of these functions: calling only one may trigger catastrophic
    // memory growth.
    void reportExtraMemoryAllocated(size_t);
    void reportExtraMemoryVisited(CellState cellStateBeforeVisiting, size_t);
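
    // A sketch of the intended pairing (MyCell and m_buffer are hypothetical):
    // report the allocation once when the external storage is created, and
    // report it as visited every time the owning cell is visited, so the
    // collector keeps an accurate view of external memory across collections.
    //
    //     m_buffer = fastMalloc(bytes);
    //     vm.heap.reportExtraMemoryAllocated(bytes);
    //     ...
    //     // later, while MyCell is being visited:
    //     heap->reportExtraMemoryVisited(cellStateBeforeVisiting, bytes);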

    // Use this API to report non-GC memory if you can't use the better API above.
    void deprecatedReportExtraMemory(size_t);

    JS_EXPORT_PRIVATE void reportAbandonedObjectGraph();

    JS_EXPORT_PRIVATE void protect(JSValue);
    JS_EXPORT_PRIVATE bool unprotect(JSValue); // True when the protect count drops to 0.
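
    // Sketch: protect() pins a cell as a GC root across code that may allocate
    // and therefore collect. Calls nest via a per-cell count (m_protectedValues):
    //
    //     heap.protect(result);
    //     doSomethingThatMightTriggerGC(); // hypothetical
    //     heap.unprotect(result);          // root goes away when count hits 0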

    JS_EXPORT_PRIVATE size_t extraMemorySize(); // Non-GC memory referenced by GC objects.
    JS_EXPORT_PRIVATE size_t size();
    JS_EXPORT_PRIVATE size_t capacity();
    JS_EXPORT_PRIVATE size_t objectCount();
    JS_EXPORT_PRIVATE size_t globalObjectCount();
    JS_EXPORT_PRIVATE size_t protectedObjectCount();
    JS_EXPORT_PRIVATE size_t protectedGlobalObjectCount();
    JS_EXPORT_PRIVATE std::unique_ptr<TypeCountSet> protectedObjectTypeCounts();
    JS_EXPORT_PRIVATE std::unique_ptr<TypeCountSet> objectTypeCounts();

    HashSet<MarkedArgumentBuffer*>& markListSet();

    template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachProtectedCell();
    template<typename Functor> void forEachCodeBlock(Functor&);

    HandleSet* handleSet() { return &m_handleSet; }
    HandleStack* handleStack() { return &m_handleStack; }

    void willStartIterating();
    void didFinishIterating();

    double lastFullGCLength() const { return m_lastFullGCLength; }
    double lastEdenGCLength() const { return m_lastEdenGCLength; }
    void increaseLastFullGCLength(double amount) { m_lastFullGCLength += amount; }

    size_t sizeBeforeLastEdenCollection() const { return m_sizeBeforeLastEdenCollect; }
    size_t sizeAfterLastEdenCollection() const { return m_sizeAfterLastEdenCollect; }
    size_t sizeBeforeLastFullCollection() const { return m_sizeBeforeLastFullCollect; }
    size_t sizeAfterLastFullCollection() const { return m_sizeAfterLastFullCollect; }

    void deleteAllCodeBlocks();
    void deleteAllUnlinkedCodeBlocks();

    void didAllocate(size_t);
    void didAbandon(size_t);

    bool isPagedOut(double deadline);

    const JITStubRoutineSet& jitStubRoutines() { return m_jitStubRoutines; }

    void addReference(JSCell*, ArrayBuffer*);

    bool isDeferred() const { return !!m_deferralDepth || !Options::useGC(); }
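
    // Deferral is normally driven by the RAII DeferGC helper (a friend below);
    // a sketch of its use, assuming the usual DeferGC(Heap&) constructor:
    //
    //     {
    //         DeferGC deferGC(vm.heap); // bumps m_deferralDepth
    //         // allocate several objects that must not be collected mid-setup
    //     } // destructor drops the depth and collects if one became necessary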

    StructureIDTable& structureIDTable() { return m_structureIDTable; }

    CodeBlockSet& codeBlockSet() { return m_codeBlocks; }

#if USE(CF)
    template<typename T> void releaseSoon(RetainPtr<T>&&);
#endif
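
    // Sketch: hand a retained CF/Objective-C object to the heap so its release
    // happens at a safe point rather than in the middle of a sweep, e.g.
    //
    //     heap.releaseSoon(WTFMove(retainPtr)); // retainPtr is a RetainPtr<T>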

    static bool isZombified(JSCell* cell) { return *(void**)cell == zombifiedBits; }

    void registerWeakGCMap(void* weakGCMap, std::function<void()> pruningCallback);
    void unregisterWeakGCMap(void* weakGCMap);
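
    // Sketch of the contract: a weak map registers a callback that purges
    // entries whose keys died, and the heap runs every registered callback
    // during pruneStaleEntriesFromWeakGCMaps(). removeDeadEntries() is a
    // hypothetical method of the registering map:
    //
    //     heap.registerWeakGCMap(this, [this] { removeDeadEntries(); });
    //     ...
    //     heap.unregisterWeakGCMap(this); // typically in the map's destructor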

    void addLogicallyEmptyWeakBlock(WeakBlock*);

#if ENABLE(RESOURCE_USAGE)
    size_t blockBytesAllocated() const { return m_blockBytesAllocated; }
#endif

    void didAllocateBlock(size_t capacity);
    void didFreeBlock(size_t capacity);

private:
    friend class CodeBlock;
    friend class CopiedBlock;
    friend class DeferGC;
    friend class DeferGCForAWhile;
    friend class GCAwareJITStubRoutine;
    friend class GCLogging;
    friend class GCThread;
    friend class HandleSet;
    friend class HeapVerifier;
    friend class JITStubRoutine;
    friend class LLIntOffsetsExtractor;
    friend class MarkedSpace;
    friend class MarkedAllocator;
    friend class MarkedBlock;
    friend class CopiedSpace;
    friend class CopyVisitor;
    friend class SlotVisitor;
    friend class IncrementalSweeper;
    friend class HeapStatistics;
    friend class VM;
    friend class WeakSet;
    template<typename T> friend void* allocateCell(Heap&);
    template<typename T> friend void* allocateCell(Heap&, size_t);

    void* allocateWithDestructor(size_t); // For use with objects with destructors.
    void* allocateWithoutDestructor(size_t); // For use with objects without destructors.
    template<typename ClassType> void* allocateObjectOfType(size_t); // Chooses one of the methods above based on type.

    static const size_t minExtraMemory = 256;

    class FinalizerOwner : public WeakHandleOwner {
        virtual void finalize(Handle<Unknown>, void* context) override;
    };

    JS_EXPORT_PRIVATE bool isValidAllocation(size_t);
    JS_EXPORT_PRIVATE void reportExtraMemoryAllocatedSlowCase(size_t);
    JS_EXPORT_PRIVATE void deprecatedReportExtraMemorySlowCase(size_t);

    void collectImpl(HeapOperation, void* stackOrigin, void* stackTop, MachineThreads::RegisterState&);

    void suspendCompilerThreads();
    void willStartCollection(HeapOperation collectionType);
    void flushOldStructureIDTables();
    void flushWriteBarrierBuffer();
    void stopAllocation();

    void markRoots(double gcStartTime, void* stackOrigin, void* stackTop, MachineThreads::RegisterState&);
    void gatherStackRoots(ConservativeRoots&, void* stackOrigin, void* stackTop, MachineThreads::RegisterState&);
    void gatherJSStackRoots(ConservativeRoots&);
    void gatherScratchBufferRoots(ConservativeRoots&);
    void clearLivenessData();
    void visitExternalRememberedSet();
    void visitSmallStrings();
    void visitConservativeRoots(ConservativeRoots&);
    void visitCompilerWorklistWeakReferences();
    void removeDeadCompilerWorklistEntries();
    void visitProtectedObjects(HeapRootVisitor&);
    void visitArgumentBuffers(HeapRootVisitor&);
    void visitException(HeapRootVisitor&);
    void visitStrongHandles(HeapRootVisitor&);
    void visitHandleStack(HeapRootVisitor&);
    void visitSamplingProfiler();
    void traceCodeBlocksAndJITStubRoutines();
    void converge();
    void visitWeakHandles(HeapRootVisitor&);
    void updateObjectCounts(double gcStartTime);
    void resetVisitors();

    void reapWeakHandles();
    void pruneStaleEntriesFromWeakGCMaps();
    void sweepArrayBuffers();
    void snapshotMarkedSpace();
    void deleteSourceProviderCaches();
    void notifyIncrementalSweeper();
    void writeBarrierCurrentlyExecutingCodeBlocks();
    void resetAllocators();
    void copyBackingStores();
    void harvestWeakReferences();
    void finalizeUnconditionalFinalizers();
    void clearUnmarkedExecutables();
    void deleteUnmarkedCompiledCode();
    JS_EXPORT_PRIVATE void addToRememberedSet(const JSCell*);
    void updateAllocationLimits();
    void didFinishCollection(double gcStartTime);
    void resumeCompilerThreads();
    void zombifyDeadObjects();
    void markDeadObjects();

    void sweepAllLogicallyEmptyWeakBlocks();
    bool sweepNextLogicallyEmptyWeakBlock();

    bool shouldDoFullCollection(HeapOperation requestedCollectionType) const;

    JSStack& stack();

    void incrementDeferralDepth();
    void decrementDeferralDepth();
    void decrementDeferralDepthAndGCIfNeeded();

    size_t threadVisitCount();
    size_t threadBytesVisited();
    size_t threadBytesCopied();

    const HeapType m_heapType;
    const size_t m_ramSize;
    const size_t m_minBytesPerCycle;
    size_t m_sizeAfterLastCollect;
    size_t m_sizeAfterLastFullCollect;
    size_t m_sizeBeforeLastFullCollect;
    size_t m_sizeAfterLastEdenCollect;
    size_t m_sizeBeforeLastEdenCollect;

    size_t m_bytesAllocatedThisCycle;
    size_t m_bytesAbandonedSinceLastFullCollect;
    size_t m_maxEdenSize;
    size_t m_maxHeapSize;
    bool m_shouldDoFullCollection;
    size_t m_totalBytesVisited;
    size_t m_totalBytesVisitedThisCycle;
    size_t m_totalBytesCopied;
    size_t m_totalBytesCopiedThisCycle;

    HeapOperation m_operationInProgress;
    StructureIDTable m_structureIDTable;
    MarkedSpace m_objectSpace;
    CopiedSpace m_storageSpace;
    GCIncomingRefCountedSet<ArrayBuffer> m_arrayBuffers;
    size_t m_extraMemorySize;
    size_t m_deprecatedExtraMemorySize;

    HashSet<const JSCell*> m_copyingRememberedSet;

    ProtectCountSet m_protectedValues;
    std::unique_ptr<HashSet<MarkedArgumentBuffer*>> m_markListSet;

    MachineThreads m_machineThreads;

    SlotVisitor m_slotVisitor;

    // We pool the slot visitors used by parallel marking threads. It's useful to be able to
    // enumerate over them, and it's useful to have them cache some small amount of memory from
    // one GC to the next. GC marking threads claim these at the start of marking, and return
    // them at the end.
    Vector<std::unique_ptr<SlotVisitor>> m_parallelSlotVisitors;
    Vector<SlotVisitor*> m_availableParallelSlotVisitors;
    Lock m_parallelSlotVisitorLock;
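
    // A rough sketch of the claim/return protocol described above (assumed,
    // not the exact implementation):
    //
    //     SlotVisitor* claimVisitor()
    //     {
    //         LockHolder locker(m_parallelSlotVisitorLock);
    //         if (m_availableParallelSlotVisitors.isEmpty()) {
    //             m_parallelSlotVisitors.append(std::make_unique<SlotVisitor>(*this));
    //             m_availableParallelSlotVisitors.append(m_parallelSlotVisitors.last().get());
    //         }
    //         return m_availableParallelSlotVisitors.takeLast();
    //     }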

    HandleSet m_handleSet;
    HandleStack m_handleStack;
    CodeBlockSet m_codeBlocks;
    JITStubRoutineSet m_jitStubRoutines;
    FinalizerOwner m_finalizerOwner;

    bool m_isSafeToCollect;

    WriteBarrierBuffer m_writeBarrierBuffer;

    VM* m_vm;
    double m_lastFullGCLength;
    double m_lastEdenGCLength;

    Vector<ExecutableBase*> m_executables;

    Vector<WeakBlock*> m_logicallyEmptyWeakBlocks;
    size_t m_indexOfNextLogicallyEmptyWeakBlockToSweep { WTF::notFound };

    RefPtr<FullGCActivityCallback> m_fullActivityCallback;
    RefPtr<GCActivityCallback> m_edenActivityCallback;
    std::unique_ptr<IncrementalSweeper> m_sweeper;
    Vector<MarkedBlock*> m_blockSnapshot;

    Vector<HeapObserver*> m_observers;

    unsigned m_deferralDepth;
    Vector<DFG::Worklist*> m_suspendedCompilerWorklists;

    std::unique_ptr<HeapVerifier> m_verifier;
#if USE(CF)
    Vector<RetainPtr<CFTypeRef>> m_delayedReleaseObjects;
    unsigned m_delayedReleaseRecursionCount;
#endif

    HashMap<void*, std::function<void()>> m_weakGCMaps;

    Lock m_markingMutex;
    Condition m_markingConditionVariable;
    MarkStackArray m_sharedMarkStack;
    unsigned m_numberOfActiveParallelMarkers { 0 };
    unsigned m_numberOfWaitingParallelMarkers { 0 };
    bool m_parallelMarkersShouldExit { false };

    Lock m_opaqueRootsMutex;
    HashSet<void*> m_opaqueRoots;

    Vector<CopiedBlock*> m_blocksToCopy;
    static const size_t s_blockFragmentLength = 32;

    ListableHandler<WeakReferenceHarvester>::List m_weakReferenceHarvesters;
    ListableHandler<UnconditionalFinalizer>::List m_unconditionalFinalizers;

    ParallelHelperClient m_helperClient;

#if ENABLE(RESOURCE_USAGE)
    size_t m_blockBytesAllocated { 0 };
#endif
};

} // namespace JSC

#endif // Heap_h