/*
 * Copyright (C) 1999-2000 Harri Porten (porten@kde.org)
 * Copyright (C) 2001 Peter Kelly (pmk@post.com)
 * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
25 #include "HandleHeap.h"
26 #include "HandleStack.h"
27 #include "MarkedBlock.h"
28 #include "MarkedBlockSet.h"
30 #include "SlotVisitor.h"
31 #include "WriteBarrierSupport.h"
32 #include <wtf/Forward.h>
33 #include <wtf/HashCountedSet.h>
34 #include <wtf/HashSet.h>
// Forward declarations for types used in this header only by pointer/reference.
class GCActivityCallback;
class GlobalCodeBlock;
class HeapRootVisitor;
class LiveObjectIterator;
class MarkedArgumentBuffer;
class WeakGCHandlePool;
// Pairs a value with a string, used by the temp-sort-vector machinery below.
typedef std::pair<JSValue, UString> ValueStringPair;
// Counted set of protected cells: a cell may be protect()ed more than once
// (see Heap::unprotect, which reports when the count drops to 0).
typedef HashCountedSet<JSCell*> ProtectCountSet;
// Counted set keyed by type-name string, used for object-count statistics.
typedef HashCountedSet<const char*> TypeCountSet;
// What the heap is currently doing (see Heap::m_operationInProgress).
enum OperationInProgress { NoOperation, Allocation, Collection };
// Heap sizing policy selected at construction time (see Heap::Heap).
enum HeapSize { SmallHeap, LargeHeap };
WTF_MAKE_NONCOPYABLE(Heap);
// Maps a value or cell back to the Heap that owns it, via the MarkedBlock
// the cell lives in (inline definitions appear later in this header).
static Heap* heap(JSValue); // 0 for immediate values
static Heap* heap(JSCell*);
// Per-cell mark-bit queries/updates; the pointer must address a cell inside
// a MarkedBlock (see the inline definitions below).
static bool isMarked(const void*);
static bool testAndSetMarked(const void*);
static bool testAndClearMarked(const void*);
static void setMarked(const void*);
// Write barrier: called after storing the second argument into 'owner'.
static void writeBarrier(const JSCell*, JSValue);
static void writeBarrier(const JSCell*, JSCell*);
Heap(JSGlobalData*, HeapSize);
void destroy(); // JSGlobalData must call destroy() before ~Heap().
JSGlobalData* globalData() const { return m_globalData; }
NewSpace& markedSpace() { return m_newSpace; }
MachineThreads& machineThreads() { return m_machineThreads; }
// Activity callback used to schedule collections; the setter takes ownership.
GCActivityCallback* activityCallback();
void setActivityCallback(PassOwnPtr<GCActivityCallback>);
// true if an allocation or collection is in progress
// Object allocation: allocate(size_t) selects a size class and allocates
// from it; allocate(SizeClass&) is the inline fast path defined below.
void* allocate(size_t);
NewSpace::SizeClass& sizeClassFor(size_t);
void* allocate(NewSpace::SizeClass&);
// Until this is called, m_isSafeToCollect stays false.
void notifyIsSafeToCollect() { m_isSafeToCollect = true; }
void collectAllGarbage();
// Property-storage nursery allocation (inline definitions below).
inline void* allocatePropertyStorage(size_t);
inline bool inPropertyStorageNursery(void*);
// Reports non-GC memory retained by GC objects; only costs above
// minExtraCost reach the out-of-line slow case (see inline below).
void reportExtraMemoryCost(size_t cost);
// Counted GC-protection for values.
void protect(JSValue);
bool unprotect(JSValue); // True when the protect count drops to 0.
// Heap introspection / statistics.
size_t objectCount();
size_t globalObjectCount();
size_t protectedObjectCount();
size_t protectedGlobalObjectCount();
PassOwnPtr<TypeCountSet> protectedObjectTypeCounts();
PassOwnPtr<TypeCountSet> objectTypeCounts();
// Registers vectors currently being sorted so their contents remain
// reachable while sorting is in progress (see markTempSortVectors).
void pushTempSortVector(Vector<ValueStringPair>*);
void popTempSortVector(Vector<ValueStringPair>*);
// Lazily-created set of in-flight argument buffers.
HashSet<MarkedArgumentBuffer*>& markListSet() { if (!m_markListSet) m_markListSet = new HashSet<MarkedArgumentBuffer*>; return *m_markListSet; }
// Generic iteration over protected cells / cells / blocks. The no-argument
// forms default-construct the Functor (inline definitions below).
template<typename Functor> typename Functor::ReturnType forEachProtectedCell(Functor&);
template<typename Functor> typename Functor::ReturnType forEachProtectedCell();
template<typename Functor> typename Functor::ReturnType forEachCell(Functor&);
template<typename Functor> typename Functor::ReturnType forEachCell();
template<typename Functor> typename Functor::ReturnType forEachBlock(Functor&);
template<typename Functor> typename Functor::ReturnType forEachBlock();
// Handle allocation: global handles come from the HandleHeap, local handles
// from the HandleStack.
HandleSlot allocateGlobalHandle() { return m_handleHeap.allocate(); }
HandleSlot allocateLocalHandle() { return m_handleStack.push(); }
HandleStack* handleStack() { return &m_handleStack; }
void getConservativeRegisterRoots(HashSet<JSCell*>& roots);
// --- Internals below. NOTE(review): the public:/private: access specifiers
// --- appear to have been lost from this excerpt; confirm against upstream.
friend class MarkedBlock;
typedef HashSet<MarkedBlock*>::iterator BlockIterator;
// Thresholds for reportExtraMemoryCost's fast-path filter.
static const size_t minExtraCost = 256;
static const size_t maxExtraCost = 1024 * 1024;
enum AllocationEffort { AllocationMustSucceed, AllocationCanFail };
static void writeBarrierFastCase(const JSCell* owner, JSCell*);
bool isValidAllocation(size_t);
void reportExtraMemoryCostSlowCase(size_t);
void canonicalizeBlocks();
void resetAllocator();
// Block management.
MarkedBlock* allocateBlock(size_t cellSize, AllocationEffort);
void freeBlocks(MarkedBlock*);
// Marking helpers used during collection.
void markProtectedObjects(HeapRootVisitor&);
void markTempSortVectors(HeapRootVisitor&);
void harvestWeakReferences();
// Out-of-line allocation paths (fast path is the inline allocate above).
void* tryAllocate(NewSpace::SizeClass&);
void* allocateSlowCase(NewSpace::SizeClass&);
enum SweepToggle { DoNotSweep, DoSweep };
void collect(SweepToggle);
void releaseFreeBlocks();
RegisterFile& registerFile();
static void writeBarrierSlowCase(const JSCell*, JSCell*);
#if ENABLE(LAZY_BLOCK_FREEING)
// Support for a background thread that releases free blocks lazily.
void waitForRelativeTimeWhileHoldingLock(double relative);
void waitForRelativeTime(double relative);
void blockFreeingThreadMain();
static void* blockFreeingThreadStartFunc(void* heap);
// NOTE(review): the matching #endif for the #if above appears to have been
// lost from this excerpt; confirm against upstream.
const HeapSize m_heapSize;
const size_t m_minBytesPerCycle;
OperationInProgress m_operationInProgress; // what the heap is doing right now
MarkedBlockSet m_blocks; // all blocks owned by this heap
#if ENABLE(LAZY_BLOCK_FREEING)
DoublyLinkedList<MarkedBlock> m_freeBlocks;
size_t m_numberOfFreeBlocks;
ThreadIdentifier m_blockFreeingThread;
Mutex m_freeBlockLock;
ThreadCondition m_freeBlockCondition;
bool m_blockFreeingThreadShouldQuit;
// NOTE(review): matching #endif lost from this excerpt.
#if ENABLE(SIMPLE_HEAP_PROFILING)
VTableSpectrum m_destroyedTypeCounts;
// NOTE(review): matching #endif lost from this excerpt.
ProtectCountSet m_protectedValues; // protect()/unprotect() counts per cell
Vector<Vector<ValueStringPair>* > m_tempSortingVectors;
HashSet<MarkedArgumentBuffer*>* m_markListSet; // lazily created; see markListSet()
OwnPtr<GCActivityCallback> m_activityCallback;
MachineThreads m_machineThreads;
SlotVisitor m_slotVisitor;
HandleHeap m_handleHeap;
HandleStack m_handleStack;
bool m_isSafeToCollect; // set by notifyIsSafeToCollect()
JSGlobalData* m_globalData; // backpointer to the owning JSGlobalData
218 return m_operationInProgress != NoOperation;
221 inline Heap* Heap::heap(JSCell* cell)
223 return MarkedBlock::blockFor(cell)->heap();
226 inline Heap* Heap::heap(JSValue v)
230 return heap(v.asCell());
233 inline bool Heap::isMarked(const void* cell)
235 return MarkedBlock::blockFor(cell)->isMarked(cell);
238 inline bool Heap::testAndSetMarked(const void* cell)
240 return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
243 inline bool Heap::testAndClearMarked(const void* cell)
245 return MarkedBlock::blockFor(cell)->testAndClearMarked(cell);
248 inline void Heap::setMarked(const void* cell)
250 MarkedBlock::blockFor(cell)->setMarked(cell);
254 inline void Heap::writeBarrierFastCase(const JSCell* owner, JSCell* cell)
256 if (MarkedBlock::blockFor(owner)->inNewSpace())
258 writeBarrierSlowCase(owner, cell);
261 inline void Heap::writeBarrier(const JSCell* owner, JSCell* cell)
263 WriteBarrierCounters::countWriteBarrier();
264 writeBarrierFastCase(owner, cell);
267 inline void Heap::writeBarrier(const JSCell* owner, JSValue value)
269 WriteBarrierCounters::countWriteBarrier();
274 writeBarrierFastCase(owner, value.asCell());
278 inline void Heap::writeBarrier(const JSCell*, JSCell*)
280 WriteBarrierCounters::countWriteBarrier();
283 inline void Heap::writeBarrier(const JSCell*, JSValue)
285 WriteBarrierCounters::countWriteBarrier();
289 inline void Heap::reportExtraMemoryCost(size_t cost)
291 if (cost > minExtraCost)
292 reportExtraMemoryCostSlowCase(cost);
295 template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell(Functor& functor)
297 canonicalizeBlocks();
298 ProtectCountSet::iterator end = m_protectedValues.end();
299 for (ProtectCountSet::iterator it = m_protectedValues.begin(); it != end; ++it)
301 m_handleHeap.forEachStrongHandle(functor, m_protectedValues);
303 return functor.returnValue();
306 template<typename Functor> inline typename Functor::ReturnType Heap::forEachProtectedCell()
309 return forEachProtectedCell(functor);
312 template<typename Functor> inline typename Functor::ReturnType Heap::forEachCell(Functor& functor)
314 canonicalizeBlocks();
315 BlockIterator end = m_blocks.set().end();
316 for (BlockIterator it = m_blocks.set().begin(); it != end; ++it)
317 (*it)->forEachCell(functor);
318 return functor.returnValue();
321 template<typename Functor> inline typename Functor::ReturnType Heap::forEachCell()
324 return forEachCell(functor);
327 template<typename Functor> inline typename Functor::ReturnType Heap::forEachBlock(Functor& functor)
329 canonicalizeBlocks();
330 BlockIterator end = m_blocks.set().end();
331 for (BlockIterator it = m_blocks.set().begin(); it != end; ++it)
333 return functor.returnValue();
336 template<typename Functor> inline typename Functor::ReturnType Heap::forEachBlock()
339 return forEachBlock(functor);
342 inline NewSpace::SizeClass& Heap::sizeClassFor(size_t bytes)
344 return m_newSpace.sizeClassFor(bytes);
347 inline void* Heap::allocate(NewSpace::SizeClass& sizeClass)
349 // This is a light-weight fast path to cover the most common case.
350 MarkedBlock::FreeCell* firstFreeCell = sizeClass.firstFreeCell;
351 if (UNLIKELY(!firstFreeCell))
352 return allocateSlowCase(sizeClass);
354 sizeClass.firstFreeCell = firstFreeCell->next;
355 return firstFreeCell;
358 inline void* Heap::allocate(size_t bytes)
360 ASSERT(isValidAllocation(bytes));
361 NewSpace::SizeClass& sizeClass = sizeClassFor(bytes);
362 return allocate(sizeClass);
365 inline void* Heap::allocatePropertyStorage(size_t bytes)
367 ASSERT(!(bytes % sizeof(JSValue)));
368 if (bytes >= NewSpace::PropertyStorageNurserySize)
370 if (void* result = m_newSpace.allocatePropertyStorage(bytes))
373 return m_newSpace.allocatePropertyStorage(bytes);
376 inline bool Heap::inPropertyStorageNursery(void* ptr)
378 return m_newSpace.inPropertyStorageNursery(ptr);