/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2011 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 */


#ifndef MarkedSpace_h
#define MarkedSpace_h

#include "MarkedAllocator.h"
#include "MarkedBlock.h"
#include "MarkedBlockSet.h"
#include <array>
#include <wtf/HashSet.h>
#include <wtf/Noncopyable.h>
#include <wtf/RetainPtr.h>
#include <wtf/Vector.h>


namespace JSC {

class Heap;
class HeapIterationScope;
class LLIntOffsetsExtractor;


40 | struct ClearMarks : MarkedBlock::VoidFunctor { |

41 | void operator()(MarkedBlock* block) |

42 | { |

43 | block->clearMarks(); |

44 | } |

45 | }; |

46 | |

47 | struct Sweep : MarkedBlock::VoidFunctor { |

48 | void operator()(MarkedBlock* block) { block->sweep(); } |

49 | }; |

50 | |

51 | struct ZombifySweep : MarkedBlock::VoidFunctor { |

52 | void operator()(MarkedBlock* block) |

53 | { |

54 | if (block->needsSweeping()) |

55 | block->sweep(); |

56 | } |

57 | }; |

58 | |

59 | struct MarkCount : MarkedBlock::CountFunctor { |

60 | void operator()(MarkedBlock* block) { count(block->markCount()); } |

61 | }; |

62 | |

63 | struct Size : MarkedBlock::CountFunctor { |

64 | void operator()(MarkedBlock* block) { count(block->markCount() * block->cellSize()); } |

65 | }; |

66 | |

// MarkedSpace owns the marked (GC'd) heap: two Subspaces of size-segregated
// MarkedAllocators (one for objects with destructors, one without) plus the
// set of all MarkedBlocks they manage.
class MarkedSpace {
    WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
    // Small size classes step by the block atom size up to preciseCutoff.
    // [ 16 ... 768 ]
    static const size_t preciseStep = MarkedBlock::atomSize;
    static const size_t preciseCutoff = 768;
    static const size_t preciseCount = preciseCutoff / preciseStep;

    // Larger size classes step by 256 bytes up to half a block.
    // [ 1024 ... blockSize/2 ]
    static const size_t impreciseStart = 1024;
    static const size_t impreciseStep = 256;
    static const size_t impreciseCutoff = MarkedBlock::blockSize / 2;
    static const size_t impreciseCount = impreciseCutoff / impreciseStep;

    // One complete set of size-class allocators; requests above
    // impreciseCutoff fall through to largeAllocator (see allocatorFor()).
    struct Subspace {
        std::array<MarkedAllocator, preciseCount> preciseAllocators;
        std::array<MarkedAllocator, impreciseCount> impreciseAllocators;
        MarkedAllocator largeAllocator;
    };

    MarkedSpace(Heap*);
    ~MarkedSpace();
    void lastChanceToFinalize();

    // Size-to-allocator routing and the two allocation entry points.
    MarkedAllocator& allocatorFor(size_t);
    MarkedAllocator& destructorAllocatorFor(size_t);
    void* allocateWithDestructor(size_t);
    void* allocateWithoutDestructor(size_t);

    Subspace& subspaceForObjectsWithDestructor() { return m_destructorSpace; }
    Subspace& subspaceForObjectsWithoutDestructor() { return m_normalSpace; }

    void resetAllocators();

    void visitWeakSets(HeapRootVisitor&);
    void reapWeakSets();

    MarkedBlockSet& blocks() { return m_blocks; }

    // Iteration bracket: forEachLiveCell/forEachDeadCell ASSERT(isIterating()),
    // so callers must be inside a willStartIterating()/didFinishIterating() pair
    // (presumably managed by HeapIterationScope — confirm against its definition).
    void willStartIterating();
    bool isIterating() const { return m_isIterating; }
    void didFinishIterating();

    void stopAllocating();
    void resumeAllocating(); // If we just stopped allocation but we didn't do a collection, we need to resume allocation.

    typedef HashSet<MarkedBlock*>::iterator BlockIterator;

    // Cell/block visitors; the no-argument overloads default-construct Functor.
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachLiveCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&, Functor&);
    template<typename Functor> typename Functor::ReturnType forEachDeadCell(HeapIterationScope&);
    template<typename Functor> typename Functor::ReturnType forEachBlock(Functor&);
    template<typename Functor> typename Functor::ReturnType forEachBlock();

    void shrink();
    void freeBlock(MarkedBlock*);
    void freeOrShrinkBlock(MarkedBlock*);

    // Bookkeeping callbacks invoked as blocks are added/used.
    void didAddBlock(MarkedBlock*);
    void didConsumeFreeList(MarkedBlock*);
    void didAllocateInBlock(MarkedBlock*);

    void clearMarks();
    void clearNewlyAllocated();
    void sweep();
    void zombifySweep();
    size_t objectCount();
    size_t size();
    size_t capacity();

    bool isPagedOut(double deadline);

    const Vector<MarkedBlock*>& blocksWithNewObjects() const { return m_blocksWithNewObjects; }

private:
    // NOTE(review): LLIntOffsetsExtractor presumably reads member offsets —
    // confirm before reordering the data members below.
    friend class LLIntOffsetsExtractor;
    friend class JIT;

    template<typename Functor> void forEachAllocator(Functor&);
    template<typename Functor> void forEachAllocator();

    Subspace m_destructorSpace;
    Subspace m_normalSpace;

    Heap* m_heap;
    size_t m_capacity; // Sum of capacities of blocks added via didAddBlock().
    bool m_isIterating;
    MarkedBlockSet m_blocks;
    Vector<MarkedBlock*> m_blocksWithNewObjects; // Blocks touched since stopAllocating/collection bookkeeping last cleared it — confirm reset site in the .cpp.
};

158 | |

159 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope&, Functor& functor) |

160 | { |

161 | ASSERT(isIterating()); |

162 | BlockIterator end = m_blocks.set().end(); |

163 | for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) { |

164 | if ((*it)->forEachLiveCell(functor) == IterationStatus::Done) |

165 | break; |

166 | } |

167 | return functor.returnValue(); |

168 | } |

169 | |

// Convenience overload: default-constructs the functor and delegates.
template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachLiveCell(HeapIterationScope& scope)
{
    Functor functor;
    return forEachLiveCell(scope, functor);
}

175 | |

176 | template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope&, Functor& functor) |

177 | { |

178 | ASSERT(isIterating()); |

179 | BlockIterator end = m_blocks.set().end(); |

180 | for (BlockIterator it = m_blocks.set().begin(); it != end; ++it) { |

181 | if ((*it)->forEachDeadCell(functor) == IterationStatus::Done) |

182 | break; |

183 | } |

184 | return functor.returnValue(); |

185 | } |

186 | |

// Convenience overload: default-constructs the functor and delegates.
template<typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachDeadCell(HeapIterationScope& scope)
{
    Functor functor;
    return forEachDeadCell(scope, functor);
}

192 | |

193 | inline MarkedAllocator& MarkedSpace::allocatorFor(size_t bytes) |

194 | { |

195 | ASSERT(bytes); |

196 | if (bytes <= preciseCutoff) |

197 | return m_normalSpace.preciseAllocators[(bytes - 1) / preciseStep]; |

198 | if (bytes <= impreciseCutoff) |

199 | return m_normalSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; |

200 | return m_normalSpace.largeAllocator; |

201 | } |

202 | |

203 | inline MarkedAllocator& MarkedSpace::destructorAllocatorFor(size_t bytes) |

204 | { |

205 | ASSERT(bytes); |

206 | if (bytes <= preciseCutoff) |

207 | return m_destructorSpace.preciseAllocators[(bytes - 1) / preciseStep]; |

208 | if (bytes <= impreciseCutoff) |

209 | return m_destructorSpace.impreciseAllocators[(bytes - 1) / impreciseStep]; |

210 | return m_destructorSpace.largeAllocator; |

211 | } |

212 | |

213 | inline void* MarkedSpace::allocateWithoutDestructor(size_t bytes) |

214 | { |

215 | return allocatorFor(bytes).allocate(bytes); |

216 | } |

217 | |

218 | inline void* MarkedSpace::allocateWithDestructor(size_t bytes) |

219 | { |

220 | return destructorAllocatorFor(bytes).allocate(bytes); |

221 | } |

222 | |

223 | template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock(Functor& functor) |

224 | { |

225 | for (size_t i = 0; i < preciseCount; ++i) |

226 | m_normalSpace.preciseAllocators[i].forEachBlock(functor); |

227 | for (size_t i = 0; i < impreciseCount; ++i) |

228 | m_normalSpace.impreciseAllocators[i].forEachBlock(functor); |

229 | m_normalSpace.largeAllocator.forEachBlock(functor); |

230 | |

231 | for (size_t i = 0; i < preciseCount; ++i) |

232 | m_destructorSpace.preciseAllocators[i].forEachBlock(functor); |

233 | for (size_t i = 0; i < impreciseCount; ++i) |

234 | m_destructorSpace.impreciseAllocators[i].forEachBlock(functor); |

235 | m_destructorSpace.largeAllocator.forEachBlock(functor); |

236 | |

237 | return functor.returnValue(); |

238 | } |

239 | |

// Convenience overload: default-constructs the functor and delegates.
template <typename Functor> inline typename Functor::ReturnType MarkedSpace::forEachBlock()
{
    Functor functor;
    return forEachBlock(functor);
}

245 | |

246 | inline void MarkedSpace::didAddBlock(MarkedBlock* block) |

247 | { |

248 | m_capacity += block->capacity(); |

249 | m_blocks.add(block); |

250 | } |

251 | |

// Remembers that this block received new objects (consumed by
// blocksWithNewObjects()).
inline void MarkedSpace::didAllocateInBlock(MarkedBlock* block)
{
    m_blocksWithNewObjects.append(block);
}

256 | |

257 | inline size_t MarkedSpace::objectCount() |

258 | { |

259 | return forEachBlock<MarkCount>(); |

260 | } |

261 | |

262 | inline size_t MarkedSpace::size() |

263 | { |

264 | return forEachBlock<Size>(); |

265 | } |

266 | |

// Returns the cached total block capacity (maintained by didAddBlock()).
inline size_t MarkedSpace::capacity()
{
    return m_capacity;
}

271 | |

} // namespace JSC

#endif // MarkedSpace_h