/*
 * Copyright (C) 2008, 2009, 2013-2016 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef VM_h
#define VM_h

#include "ControlFlowProfiler.h"
#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "FunctionHasExecutedCache.h"
#if ENABLE(JIT)
#include "GPRInfo.h"
#endif
#include "Heap.h"
#include "Intrinsic.h"
#include "JITThunks.h"
#include "JSCJSValue.h"
#include "JSLock.h"
#include "LLIntData.h"
#include "MacroAssemblerCodeRef.h"
#include "Microtask.h"
#include "NumericStrings.h"
#include "PrivateName.h"
#include "PrototypeMap.h"
#include "SmallStrings.h"
#include "SourceCode.h"
#include "Strong.h"
#include "ThunkGenerators.h"
#include "TypedArrayController.h"
#include "VMEntryRecord.h"
#include "Watchpoint.h"
#include <wtf/Bag.h>
#include <wtf/BumpPointerAllocator.h>
#include <wtf/DateMath.h>
#include <wtf/Deque.h>
#include <wtf/Forward.h>
#include <wtf/HashMap.h>
#include <wtf/HashSet.h>
#include <wtf/SimpleStats.h>
#include <wtf/StackBounds.h>
#include <wtf/Stopwatch.h>
#include <wtf/ThreadSafeRefCounted.h>
#include <wtf/ThreadSpecific.h>
#include <wtf/WTFThreadData.h>
#include <wtf/WeakRandom.h>
#include <wtf/text/SymbolRegistry.h>
#include <wtf/text/WTFString.h>
#if ENABLE(REGEXP_TRACING)
#include <wtf/ListHashSet.h>
#endif

namespace JSC {

class BuiltinExecutables;
class BytecodeIntrinsicRegistry;
class CodeBlock;
class CodeCache;
class CommonIdentifiers;
class CustomGetterSetter;
class ExecState;
class Exception;
class HandleStack;
class TypeProfiler;
class TypeProfilerLog;
class Identifier;
class Interpreter;
class JSBoundSlotBaseFunction;
class JSGlobalObject;
class JSObject;
class LLIntOffsetsExtractor;
class LegacyProfiler;
class NativeExecutable;
class RegExpCache;
class RegisterAtOffsetList;
#if ENABLE(SAMPLING_PROFILER)
class SamplingProfiler;
#endif
class ScriptExecutable;
class SourceProvider;
class SourceProviderCache;
struct StackFrame;
class Structure;
#if ENABLE(REGEXP_TRACING)
class RegExp;
#endif
class UnlinkedCodeBlock;
class UnlinkedEvalCodeBlock;
class UnlinkedFunctionExecutable;
class UnlinkedProgramCodeBlock;
class UnlinkedModuleProgramCodeBlock;
class VirtualRegister;
class VMEntryScope;
class Watchdog;
class Watchpoint;
class WatchpointSet;

#if ENABLE(DFG_JIT)
namespace DFG {
class LongLivedState;
}
#endif // ENABLE(DFG_JIT)
#if ENABLE(FTL_JIT)
namespace FTL {
class Thunks;
}
#endif // ENABLE(FTL_JIT)
namespace CommonSlowPaths {
struct ArityCheckData;
}
namespace Profiler {
class Database;
}

struct HashTable;
struct Instruction;

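// Caches the most recent local time offset lookup: 'offset' is taken to be
// valid for UTC times in [start, end]. The constructor's start = 0.0,
// end = -1.0 sentinel encodes an empty range, so the first lookup always
// misses and repopulates the cache.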
struct LocalTimeOffsetCache {
    LocalTimeOffsetCache()
        : start(0.0)
        , end(-1.0)
        , increment(0.0)
        , timeType(WTF::UTCTime)
    {
    }

    void reset()
    {
        offset = LocalTimeOffset();
        start = 0.0;
        end = -1.0;
        increment = 0.0;
        timeType = WTF::UTCTime;
    }

    LocalTimeOffset offset;
    double start;
    double end;
    double increment;
    WTF::TimeType timeType;
};

class QueuedTask {
    WTF_MAKE_NONCOPYABLE(QueuedTask);
    WTF_MAKE_FAST_ALLOCATED;
public:
    void run();

    QueuedTask(VM& vm, JSGlobalObject* globalObject, PassRefPtr<Microtask> microtask)
        : m_globalObject(vm, globalObject)
        , m_microtask(microtask)
    {
    }

private:
    Strong<JSGlobalObject> m_globalObject;
    RefPtr<Microtask> m_microtask;
};

class ConservativeRoots;

#if COMPILER(MSVC)
#pragma warning(push)
#pragma warning(disable: 4200) // Disable "zero-sized array in struct/union" warning
#endif
struct ScratchBuffer {
    ScratchBuffer()
    {
        u.m_activeLength = 0;
    }

    static ScratchBuffer* create(size_t size)
    {
        ScratchBuffer* result = new (fastMalloc(ScratchBuffer::allocationSize(size))) ScratchBuffer;

        return result;
    }

    static size_t allocationSize(size_t bufferSize) { return sizeof(ScratchBuffer) + bufferSize; }
    void setActiveLength(size_t activeLength) { u.m_activeLength = activeLength; }
    size_t activeLength() const { return u.m_activeLength; }
    size_t* activeLengthPtr() { return &u.m_activeLength; }
    void* dataBuffer() { return m_buffer; }

    union {
        size_t m_activeLength;
        double pad; // Make sure m_buffer is double aligned.
    } u;
#if CPU(MIPS) && (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == 2)
    void* m_buffer[0] __attribute__((aligned(8)));
#else
    void* m_buffer[0];
#endif
};
#if COMPILER(MSVC)
#pragma warning(pop)
#endif
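
// ScratchBuffer uses the trailing zero-length array idiom: create() fastMallocs
// sizeof(ScratchBuffer) plus the requested payload, so m_buffer names the extra
// storage. A sketch of a typical use (sizes illustrative):
//     ScratchBuffer* scratch = ScratchBuffer::create(10 * sizeof(EncodedJSValue));
//     scratch->setActiveLength(10 * sizeof(EncodedJSValue)); // the GC can treat the active bytes as conservative roots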

class VM : public ThreadSafeRefCounted<VM> {
public:
    // WebCore has a one-to-one mapping of threads to VMs;
    // either create() or createLeaked() should only be called once
    // per thread; the result is that thread's 'default' VM (it uses the
    // thread's default string uniquing table from wtfThreadData).
    // API contexts created using the new context-group-aware interface
    // create APIContextGroup objects, which require less locking of JSC
    // than the old singleton APIShared VM created for use by
    // the original API.
    enum VMType { Default, APIContextGroup, APIShared };
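
    // A minimal embedder setup, as a sketch (JSLockHolder is the client-facing
    // RAII lock from JSLock.h; the heap type and usage are illustrative):
    //     Ref<VM> vm = VM::create(LargeHeap);
    //     JSLockHolder locker(vm.ptr());
    //     ... create global objects and run code against this VM ...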

    struct ClientData {
        JS_EXPORT_PRIVATE virtual ~ClientData() = 0;
    };

    bool isSharedInstance() { return vmType == APIShared; }
    bool usingAPI() { return vmType != Default; }
    JS_EXPORT_PRIVATE static bool sharedInstanceExists();
    JS_EXPORT_PRIVATE static VM& sharedInstance();

    JS_EXPORT_PRIVATE static Ref<VM> create(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE static Ref<VM> createLeaked(HeapType = SmallHeap);
    static Ref<VM> createContextGroup(HeapType = SmallHeap);
    JS_EXPORT_PRIVATE ~VM();

    JS_EXPORT_PRIVATE Watchdog& ensureWatchdog();
    Watchdog* watchdog() { return m_watchdog.get(); }

#if ENABLE(SAMPLING_PROFILER)
    SamplingProfiler* samplingProfiler() { return m_samplingProfiler.get(); }
    JS_EXPORT_PRIVATE void ensureSamplingProfiler(RefPtr<Stopwatch>&&);
#endif

private:
    RefPtr<JSLock> m_apiLock;

public:
#if ENABLE(ASSEMBLER)
    // executableAllocator should be destructed after the heap, as the heap can call executableAllocator
    // in its destructor.
    ExecutableAllocator executableAllocator;
#endif

    // The heap should be just after executableAllocator and before other members to ensure that it's
    // destructed after all the objects that reference it.
    Heap heap;

#if ENABLE(DFG_JIT)
    std::unique_ptr<DFG::LongLivedState> dfgState;
#endif // ENABLE(DFG_JIT)

    VMType vmType;
    ClientData* clientData;
    VMEntryFrame* topVMEntryFrame;
    ExecState* topCallFrame;
    Strong<Structure> structureStructure;
    Strong<Structure> structureRareDataStructure;
    Strong<Structure> terminatedExecutionErrorStructure;
    Strong<Structure> stringStructure;
    Strong<Structure> notAnObjectStructure;
    Strong<Structure> propertyNameIteratorStructure;
    Strong<Structure> propertyNameEnumeratorStructure;
    Strong<Structure> getterSetterStructure;
    Strong<Structure> customGetterSetterStructure;
    Strong<Structure> scopedArgumentsTableStructure;
    Strong<Structure> apiWrapperStructure;
    Strong<Structure> JSScopeStructure;
    Strong<Structure> executableStructure;
    Strong<Structure> nativeExecutableStructure;
    Strong<Structure> evalExecutableStructure;
    Strong<Structure> programExecutableStructure;
    Strong<Structure> functionExecutableStructure;
#if ENABLE(WEBASSEMBLY)
    Strong<Structure> webAssemblyExecutableStructure;
#endif
    Strong<Structure> moduleProgramExecutableStructure;
    Strong<Structure> regExpStructure;
    Strong<Structure> symbolStructure;
    Strong<Structure> symbolTableStructure;
    Strong<Structure> structureChainStructure;
    Strong<Structure> sparseArrayValueMapStructure;
    Strong<Structure> templateRegistryKeyStructure;
    Strong<Structure> arrayBufferNeuteringWatchpointStructure;
    Strong<Structure> unlinkedFunctionExecutableStructure;
    Strong<Structure> unlinkedProgramCodeBlockStructure;
    Strong<Structure> unlinkedEvalCodeBlockStructure;
    Strong<Structure> unlinkedFunctionCodeBlockStructure;
    Strong<Structure> unlinkedModuleProgramCodeBlockStructure;
    Strong<Structure> propertyTableStructure;
    Strong<Structure> weakMapDataStructure;
    Strong<Structure> inferredValueStructure;
    Strong<Structure> inferredTypeStructure;
    Strong<Structure> inferredTypeTableStructure;
    Strong<Structure> functionRareDataStructure;
    Strong<Structure> generatorFrameStructure;
    Strong<Structure> exceptionStructure;
    Strong<Structure> promiseDeferredStructure;
    Strong<Structure> internalPromiseDeferredStructure;
    Strong<Structure> nativeStdFunctionCellStructure;
    Strong<Structure> programCodeBlockStructure;
    Strong<Structure> moduleProgramCodeBlockStructure;
    Strong<Structure> evalCodeBlockStructure;
    Strong<Structure> functionCodeBlockStructure;
    Strong<Structure> webAssemblyCodeBlockStructure;

    Strong<JSCell> iterationTerminator;
    Strong<JSCell> emptyPropertyNameEnumerator;

    AtomicStringTable* m_atomicStringTable;
    WTF::SymbolRegistry m_symbolRegistry;
    CommonIdentifiers* propertyNames;
    const MarkedArgumentBuffer* emptyList; // Lists are supposed to be allocated on the stack to have their elements properly marked, which is not the case here - but this list has nothing to mark.
    SmallStrings smallStrings;
    NumericStrings numericStrings;
    DateInstanceCache dateInstanceCache;
    WTF::SimpleStats machineCodeBytesPerBytecodeWordForBaselineJIT;
    WeakGCMap<std::pair<CustomGetterSetter*, int>, JSBoundSlotBaseFunction> customGetterSetterFunctionMap;
    WeakGCMap<StringImpl*, JSString, PtrHash<StringImpl*>> stringCache;
    Strong<JSString> lastCachedString;

    AtomicStringTable* atomicStringTable() const { return m_atomicStringTable; }
    WTF::SymbolRegistry& symbolRegistry() { return m_symbolRegistry; }

    void setInDefineOwnProperty(bool inDefineOwnProperty)
    {
        m_inDefineOwnProperty = inDefineOwnProperty;
    }

    bool isInDefineOwnProperty()
    {
        return m_inDefineOwnProperty;
    }

    LegacyProfiler* enabledProfiler() { return m_enabledProfiler; }
    void setEnabledProfiler(LegacyProfiler*);

    void* enabledProfilerAddress() { return &m_enabledProfiler; }

#if ENABLE(JIT)
    bool canUseJIT() { return m_canUseJIT; }
#else
    bool canUseJIT() { return false; } // interpreter only
#endif

#if ENABLE(YARR_JIT)
    bool canUseRegExpJIT() { return m_canUseRegExpJIT; }
#else
    bool canUseRegExpJIT() { return false; } // interpreter only
#endif

    SourceProviderCache* addSourceProviderCache(SourceProvider*);
    void clearSourceProviderCaches();

    PrototypeMap prototypeMap;

    typedef HashMap<RefPtr<SourceProvider>, RefPtr<SourceProviderCache>> SourceProviderCacheMap;
    SourceProviderCacheMap sourceProviderCacheMap;
    Interpreter* interpreter;
#if ENABLE(JIT)
#if NUMBER_OF_CALLEE_SAVES_REGISTERS > 0
    intptr_t calleeSaveRegistersBuffer[NUMBER_OF_CALLEE_SAVES_REGISTERS];

    static ptrdiff_t calleeSaveRegistersBufferOffset()
    {
        return OBJECT_OFFSETOF(VM, calleeSaveRegistersBuffer);
    }
#endif // NUMBER_OF_CALLEE_SAVES_REGISTERS > 0

    std::unique_ptr<JITThunks> jitStubs;
    MacroAssemblerCodeRef getCTIStub(ThunkGenerator generator)
    {
        return jitStubs->ctiStub(this, generator);
    }
    NativeExecutable* getHostFunction(NativeFunction, Intrinsic, const String& name);

    std::unique_ptr<RegisterAtOffsetList> allCalleeSaveRegisterOffsets;

    RegisterAtOffsetList* getAllCalleeSaveRegisterOffsets() { return allCalleeSaveRegisterOffsets.get(); }

#endif // ENABLE(JIT)
    std::unique_ptr<CommonSlowPaths::ArityCheckData> arityCheckData;
#if ENABLE(FTL_JIT)
    std::unique_ptr<FTL::Thunks> ftlThunks;
#endif
    NativeExecutable* getHostFunction(NativeFunction, NativeFunction constructor, const String& name);

    static ptrdiff_t exceptionOffset()
    {
        return OBJECT_OFFSETOF(VM, m_exception);
    }

    static ptrdiff_t callFrameForCatchOffset()
    {
        return OBJECT_OFFSETOF(VM, callFrameForCatch);
    }

    static ptrdiff_t targetMachinePCForThrowOffset()
    {
        return OBJECT_OFFSETOF(VM, targetMachinePCForThrow);
    }

    void restorePreviousException(Exception* exception) { setException(exception); }

    void clearException() { m_exception = nullptr; }
    void clearLastException() { m_lastException = nullptr; }

    ExecState** addressOfCallFrameForCatch() { return &callFrameForCatch; }

    Exception* exception() const { return m_exception; }
    JSCell** addressOfException() { return reinterpret_cast<JSCell**>(&m_exception); }

    Exception* lastException() const { return m_lastException; }
    JSCell** addressOfLastException() { return reinterpret_cast<JSCell**>(&m_lastException); }

    JS_EXPORT_PRIVATE void throwException(ExecState*, Exception*);
    JS_EXPORT_PRIVATE JSValue throwException(ExecState*, JSValue);
    JS_EXPORT_PRIVATE JSObject* throwException(ExecState*, JSObject*);

    void setFailNextNewCodeBlock() { m_failNextNewCodeBlock = true; }
    bool getAndClearFailNextNewCodeBlock()
    {
        bool result = m_failNextNewCodeBlock;
        m_failNextNewCodeBlock = false;
        return result;
    }

    void* stackPointerAtVMEntry() const { return m_stackPointerAtVMEntry; }
    void setStackPointerAtVMEntry(void*);

    size_t reservedZoneSize() const { return m_reservedZoneSize; }
    size_t updateReservedZoneSize(size_t reservedZoneSize);

#if ENABLE(FTL_JIT)
    void updateFTLLargestStackSize(size_t);
    void** addressOfFTLStackLimit() { return &m_ftlStackLimit; }
#endif

#if !ENABLE(JIT)
    void* jsStackLimit() { return m_jsStackLimit; }
    void setJSStackLimit(void* limit) { m_jsStackLimit = limit; }
#endif
    void* stackLimit() { return m_stackLimit; }
    void** addressOfStackLimit() { return &m_stackLimit; }

    bool isSafeToRecurse(size_t neededStackInBytes = 0) const
    {
        ASSERT(wtfThreadData().stack().isGrowingDownward());
        int8_t* curr = reinterpret_cast<int8_t*>(&curr);
        int8_t* limit = reinterpret_cast<int8_t*>(m_stackLimit);
        return curr >= limit && static_cast<size_t>(curr - limit) >= neededStackInBytes;
    }
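
    // A typical guard, as a sketch (throwStackOverflowError is declared in
    // ExceptionHelpers.h; the needed-bytes figure is illustrative):
    //     if (!vm.isSafeToRecurse(4 * KB))
    //         return throwStackOverflowError(exec);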

    void* lastStackTop() { return m_lastStackTop; }
    void setLastStackTop(void*);

    const ClassInfo* const jsArrayClassInfo;
    const ClassInfo* const jsFinalObjectClassInfo;

    JSValue hostCallReturnValue;
    unsigned varargsLength;
    ExecState* newCallFrameReturnValue;
    ExecState* callFrameForCatch;
    void* targetMachinePCForThrow;
    Instruction* targetInterpreterPCForThrow;
    uint32_t osrExitIndex;
    void* osrExitJumpDestination;
    Vector<ScratchBuffer*> scratchBuffers;
    size_t sizeOfLastScratchBuffer;

    bool isExecutingInRegExpJIT { false };

    ScratchBuffer* scratchBufferForSize(size_t size)
    {
        if (!size)
            return nullptr;

        if (size > sizeOfLastScratchBuffer) {
            // Protect against an N^2 memory usage pathology by ensuring
            // that at worst, we get a geometric series, meaning that the
            // total memory usage is somewhere around
            // max(scratch buffer size) * 4.
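            // Illustrative numbers: requests of 100, 150, and 300 bytes
            // allocate buffers of 200 and then 600 bytes (the 150-byte
            // request reuses the 200-byte buffer). Each new buffer is more
            // than twice the size of the previous one, so the sum of all
            // allocations stays within twice the largest buffer.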
            sizeOfLastScratchBuffer = size * 2;

            ScratchBuffer* newBuffer = ScratchBuffer::create(sizeOfLastScratchBuffer);
            RELEASE_ASSERT(newBuffer);
            scratchBuffers.append(newBuffer);
        }

        ScratchBuffer* result = scratchBuffers.last();
        result->setActiveLength(0);
        return result;
    }

    EncodedJSValue* exceptionFuzzingBuffer(size_t size)
    {
        ASSERT(Options::useExceptionFuzz());
        if (!m_exceptionFuzzBuffer)
            m_exceptionFuzzBuffer = MallocPtr<EncodedJSValue>::malloc(size);
        return m_exceptionFuzzBuffer.get();
    }

    void gatherConservativeRoots(ConservativeRoots&);

    VMEntryScope* entryScope;

    JSObject* stringRecursionCheckFirstObject { nullptr };
    HashSet<JSObject*> stringRecursionCheckVisitedObjects;

    LocalTimeOffsetCache localTimeOffsetCache;

    String cachedDateString;
    double cachedDateStringValue;

    std::unique_ptr<Profiler::Database> m_perBytecodeProfiler;
    RefPtr<TypedArrayController> m_typedArrayController;
    RegExpCache* m_regExpCache;
    BumpPointerAllocator m_regExpAllocator;

#if ENABLE(REGEXP_TRACING)
    typedef ListHashSet<RegExp*> RTTraceList;
    RTTraceList* m_rtTraceList;
#endif

    bool hasExclusiveThread() const { return m_apiLock->hasExclusiveThread(); }
    std::thread::id exclusiveThread() const { return m_apiLock->exclusiveThread(); }
    void setExclusiveThread(std::thread::id threadId) { m_apiLock->setExclusiveThread(threadId); }

    JS_EXPORT_PRIVATE void resetDateCache();

    JS_EXPORT_PRIVATE void startSampling();
    JS_EXPORT_PRIVATE void stopSampling();
    JS_EXPORT_PRIVATE void dumpSampleData(ExecState*);
    RegExpCache* regExpCache() { return m_regExpCache; }
#if ENABLE(REGEXP_TRACING)
    void addRegExpToTrace(RegExp*);
#endif
    JS_EXPORT_PRIVATE void dumpRegExpTrace();

    bool isCollectorBusy() { return heap.isBusy(); }

#if ENABLE(GC_VALIDATION)
    bool isInitializingObject() const;
    void setInitializingObjectClass(const ClassInfo*);
#endif

    bool currentThreadIsHoldingAPILock() const { return m_apiLock->currentThreadIsHoldingLock(); }

    JSLock& apiLock() { return *m_apiLock; }
    CodeCache* codeCache() { return m_codeCache.get(); }

    JS_EXPORT_PRIVATE void whenIdle(std::function<void()>);

    JS_EXPORT_PRIVATE void deleteAllCode();
    JS_EXPORT_PRIVATE void deleteAllLinkedCode();

    void registerWatchpointForImpureProperty(const Identifier&, Watchpoint*);

    // FIXME: Use AtomicString once it is merged with Identifier.
    JS_EXPORT_PRIVATE void addImpureProperty(const String&);

    BuiltinExecutables* builtinExecutables() { return m_builtinExecutables.get(); }

    bool enableTypeProfiler();
    bool disableTypeProfiler();
    TypeProfilerLog* typeProfilerLog() { return m_typeProfilerLog.get(); }
    TypeProfiler* typeProfiler() { return m_typeProfiler.get(); }
    JS_EXPORT_PRIVATE void dumpTypeProfilerData();

    FunctionHasExecutedCache* functionHasExecutedCache() { return &m_functionHasExecutedCache; }

    ControlFlowProfiler* controlFlowProfiler() { return m_controlFlowProfiler.get(); }
    bool enableControlFlowProfiler();
    bool disableControlFlowProfiler();

    JS_EXPORT_PRIVATE void queueMicrotask(JSGlobalObject*, PassRefPtr<Microtask>);
    JS_EXPORT_PRIVATE void drainMicrotasks();
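
    // Sketch of the intended microtask flow (names illustrative): an embedder
    // queues work against a global object and drains at a safe point:
    //     vm.queueMicrotask(globalObject, task);
    //     ...
    //     vm.drainMicrotasks(); // runs tasks in FIFO order, including ones queued while draining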
    void setShouldRewriteConstAsVar(bool shouldRewrite) { m_shouldRewriteConstAsVar = shouldRewrite; }
    ALWAYS_INLINE bool shouldRewriteConstAsVar() { return m_shouldRewriteConstAsVar; }

    inline bool shouldTriggerTermination(ExecState*);

    void setShouldBuildPCToCodeOriginMapping() { m_shouldBuildPCToCodeOriginMapping = true; }
    bool shouldBuildPCToCodeOriginMapping() const { return m_shouldBuildPCToCodeOriginMapping; }

    BytecodeIntrinsicRegistry& bytecodeIntrinsicRegistry() { return *m_bytecodeIntrinsicRegistry; }

private:
    friend class LLIntOffsetsExtractor;
    friend class ClearExceptionScope;

    VM(VMType, HeapType);
    static VM*& sharedInstanceInternal();
    void createNativeThunk();

    void updateStackLimit();

    void setException(Exception* exception)
    {
        m_exception = exception;
        m_lastException = exception;
    }

#if ENABLE(ASSEMBLER)
    bool m_canUseAssembler;
#endif
#if ENABLE(JIT)
    bool m_canUseJIT;
#endif
#if ENABLE(YARR_JIT)
    bool m_canUseRegExpJIT;
#endif
#if ENABLE(GC_VALIDATION)
    const ClassInfo* m_initializingObjectClass;
#endif
    void* m_stackPointerAtVMEntry;
    size_t m_reservedZoneSize;
#if !ENABLE(JIT)
    struct {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#else
    union {
        void* m_stackLimit;
        void* m_jsStackLimit;
    };
#if ENABLE(FTL_JIT)
    void* m_ftlStackLimit;
    size_t m_largestFTLStackSize;
#endif
#endif
    void* m_lastStackTop;
    Exception* m_exception { nullptr };
    Exception* m_lastException { nullptr };
    bool m_failNextNewCodeBlock { false };
    bool m_inDefineOwnProperty;
    bool m_shouldRewriteConstAsVar { false };
    bool m_shouldBuildPCToCodeOriginMapping { false };
    std::unique_ptr<CodeCache> m_codeCache;
    LegacyProfiler* m_enabledProfiler;
    std::unique_ptr<BuiltinExecutables> m_builtinExecutables;
    HashMap<String, RefPtr<WatchpointSet>> m_impurePropertyWatchpointSets;
    std::unique_ptr<TypeProfiler> m_typeProfiler;
    std::unique_ptr<TypeProfilerLog> m_typeProfilerLog;
    unsigned m_typeProfilerEnabledCount;
    FunctionHasExecutedCache m_functionHasExecutedCache;
    std::unique_ptr<ControlFlowProfiler> m_controlFlowProfiler;
    unsigned m_controlFlowProfilerEnabledCount;
    Deque<std::unique_ptr<QueuedTask>> m_microtaskQueue;
    MallocPtr<EncodedJSValue> m_exceptionFuzzBuffer;
    RefPtr<Watchdog> m_watchdog;
#if ENABLE(SAMPLING_PROFILER)
    RefPtr<SamplingProfiler> m_samplingProfiler;
#endif
    std::unique_ptr<BytecodeIntrinsicRegistry> m_bytecodeIntrinsicRegistry;
};

#if ENABLE(GC_VALIDATION)
inline bool VM::isInitializingObject() const
{
    return !!m_initializingObjectClass;
}

inline void VM::setInitializingObjectClass(const ClassInfo* initializingObjectClass)
{
    m_initializingObjectClass = initializingObjectClass;
}
#endif

inline Heap* WeakSet::heap() const
{
    return &m_vm->heap;
}

#if ENABLE(JIT)
extern "C" void sanitizeStackForVMImpl(VM*);
#endif

void sanitizeStackForVM(VM*);
void logSanitizeStack(VM*);

} // namespace JSC

#endif // VM_h