/*
 * Copyright (C) 2014, 2015 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CopyBarrier_h
#define CopyBarrier_h

#include "Heap.h"

namespace JSC {

enum class CopyState {
    // The backing store is not planned to be copied during this epoch. If you keep a pointer to the
    // backing store on the stack, it will not get copied. If you don't keep it on the stack, it may get
    // copied starting at the next handshake (that is, it may transition from ToSpace to CopyPlanned, but
    // CopyPlanned means ToSpace prior to the handshake that starts the copying phase).
    ToSpace,

    // The marking phase has selected this backing store to be copied. If we are not yet in the copying
    // phase, this backing store is still in to-space; all that is needed in such a case is to mask off the
    // low bits. If we are in the copying phase, this pointer refers to from-space. The barrier should
    // first copy the object - or wait for copying to finish - before using the object.
    CopyPlanned,

    // The object is being copied right now. Anyone wanting to use the object must wait for the copy to
    // finish. Notifications about copying use the ParkingLot combined with these bits. If the state is
    // CopyingAndWaiting, then when the copying finishes, whatever thread was doing it will unparkAll()
    // on the address of the CopyBarrierBase. So, to wait for copying to finish, CAS this to
    // CopyingAndWaiting and then parkConditionally() on the barrier address.
    Copying,

    // The object is being copied right now, and there are one or more threads parked. Those threads want
    // to be unparked when copying is done. So, whichever thread does the copying needs to call unparkAll()
    // on the barrier address after copying is done.
    CopyingAndWaiting
};
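
// A CopyBarrierBase is a single pointer-sized word: the CopyState lives in the low spaceBits bits of the
// pointer, which works because copied-space backing stores are suitably aligned. The following sketch is
// illustrative only (the variable names are made up); the real slow path lives in Heap::copyBarrier().
//
//     void* taggedValue = barrier.getWithoutBarrier();
//     CopyState state = barrier.copyState(); // taggedValue & spaceBits
//     char* payload = bitwise_cast<char*>(
//         bitwise_cast<uintptr_t>(taggedValue) & ~static_cast<uintptr_t>(CopyBarrierBase::spaceBits));
//     // If state is ToSpace, payload is already safe to use. Otherwise the slow path decides whether to
//     // copy, wait for a copy in progress, or just strip the bits.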

class CopyBarrierBase {
public:
    static const unsigned spaceBits = 3;

    CopyBarrierBase()
        : m_value(nullptr)
    {
    }

    bool operator!() const { return !m_value; }

    explicit operator bool() const { return m_value; }

    void* getWithoutBarrier() const
    {
        return m_value;
    }

    // Use this version of get() if you only want to execute the barrier slow path when some condition
    // holds, and you only want to evaluate that condition after first checking the barrier's fast-path
    // condition (i.e. whether the low bits are set). Usually, you just want to use get().
    template<typename Functor>
    void* getPredicated(const JSCell* owner, const Functor& functor) const
    {
        void* result = m_value;
        if (UNLIKELY(bitwise_cast<uintptr_t>(result) & spaceBits)) {
            if (functor())
                return Heap::copyBarrier(owner, m_value);
        }
        return result;
    }
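
    // Example use of getPredicated() (an illustrative sketch; the field, owner, and length check are
    // made up, not code from the engine):
    //
    //     void* storage = m_storage.getPredicated(owner, [&] () -> bool {
    //         // Only take the slow path if we are actually going to dereference the storage.
    //         return index < m_length;
    //     });
    //
    // If the functor returns false, the raw (possibly still tagged) pointer is returned, which is fine
    // precisely because the caller has decided not to use it.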

    // When we are in the concurrent copying phase, this method may lock the barrier object (i.e. the field
    // pointing to copied space) and call directly into the owning object's copyBackingStore() method.
    void* get(const JSCell* owner) const
    {
        return getPredicated(owner, [] () -> bool { return true; });
    }

    CopyState copyState() const
    {
        return static_cast<CopyState>(bitwise_cast<uintptr_t>(m_value) & spaceBits);
    }

    // This only works when you know that there is nobody else concurrently messing with this CopyBarrier.
    // That's hard to guarantee, though there are a few unusual places where this ends up being safe.
    // Usually you want to use CopyBarrier::weakCASWithoutBarrier().
    void setCopyState(CopyState copyState)
    {
        WTF::storeStoreFence();
        uintptr_t value = bitwise_cast<uintptr_t>(m_value);
        value &= ~static_cast<uintptr_t>(spaceBits);
        value |= static_cast<uintptr_t>(copyState);
        m_value = bitwise_cast<void*>(value);
    }

    void clear() { m_value = nullptr; }

protected:
    CopyBarrierBase(VM& vm, const JSCell* owner, void* value)
    {
        this->set(vm, owner, value);
    }

    void set(VM& vm, const JSCell* owner, void* value)
    {
        this->m_value = value;
        vm.heap.writeBarrier(owner);
    }

    void setWithoutBarrier(void* value)
    {
        this->m_value = value;
    }

    bool weakCASWithoutBarrier(
        void* oldPointer, CopyState oldCopyState, void* newPointer, CopyState newCopyState)
    {
        uintptr_t oldValue = bitwise_cast<uintptr_t>(oldPointer) | static_cast<uintptr_t>(oldCopyState);
        uintptr_t newValue = bitwise_cast<uintptr_t>(newPointer) | static_cast<uintptr_t>(newCopyState);
        return WTF::weakCompareAndSwap(
            &m_value, bitwise_cast<void*>(oldValue), bitwise_cast<void*>(newValue));
    }
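
    // Illustrative sketch: a copying thread could use this CAS to claim a backing store that marking had
    // selected (CopyPlanned -> Copying). The helper name is made up; the real transitions live in the
    // copying phase and Heap::copyBarrier(), not in this header.
    //
    //     if (barrier.weakCASWithoutBarrier(payload, CopyState::CopyPlanned, payload, CopyState::Copying)) {
    //         void* newPayload = copyTheBackingStore(payload); // hypothetical helper
    //         // After copying, install the to-space pointer (state ToSpace) and, if the state was bumped
    //         // to CopyingAndWaiting in the meantime, unparkAll() on the barrier's address.
    //     }
    //     // A failed CAS means another thread got there first (or the CAS failed spuriously - it is weak).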

private:
    mutable void* m_value;
};

template <typename T>
class CopyBarrier : public CopyBarrierBase {
public:
    CopyBarrier()
    {
    }

    CopyBarrier(VM& vm, const JSCell* owner, T& value)
        : CopyBarrierBase(vm, owner, &value)
    {
    }

    CopyBarrier(VM& vm, const JSCell* owner, T* value)
        : CopyBarrierBase(vm, owner, value)
    {
    }

    T* getWithoutBarrier() const
    {
        return bitwise_cast<T*>(CopyBarrierBase::getWithoutBarrier());
    }

    T* get(const JSCell* owner) const
    {
        return bitwise_cast<T*>(CopyBarrierBase::get(owner));
    }

    template<typename Functor>
    T* getPredicated(const JSCell* owner, const Functor& functor) const
    {
        return bitwise_cast<T*>(CopyBarrierBase::getPredicated(owner, functor));
    }

    void set(VM& vm, const JSCell* owner, T* value)
    {
        CopyBarrierBase::set(vm, owner, value);
    }

    void setWithoutBarrier(T* value)
    {
        CopyBarrierBase::setWithoutBarrier(value);
    }

    bool weakCASWithoutBarrier(T* oldPointer, CopyState oldCopyState, T* newPointer, CopyState newCopyState)
    {
        return CopyBarrierBase::weakCASWithoutBarrier(oldPointer, oldCopyState, newPointer, newCopyState);
    }
};
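
// Typical use: a cell that owns a copied-space backing store declares a CopyBarrier member and reads it
// through get() so that concurrent copying is handled transparently. An illustrative sketch (the class
// and field names are made up):
//
//     class SomeCell : public JSCell {
//         double* data() { return m_data.get(this); } // may take the copy-barrier slow path
//         void setData(VM& vm, double* data) { m_data.set(vm, this, data); } // runs the write barrier
//         CopyBarrier<double> m_data;
//     };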

} // namespace JSC

#endif // CopyBarrier_h