author    Lars Knoll <lars.knoll@qt.io>    2017-02-08 16:21:02 +0100
committer Lars Knoll <lars.knoll@qt.io>    2017-03-09 08:59:20 +0000
commit    2a554434a571dcefd26cf10ef8c5ae8b3b7d66db (patch)
tree      5532f0a0206fbbde0a3099ff1e0ee58188a97275 /src/qml/memory/qv4mm_p.h
parent    05de4e044f92dd278a00e410be8f070bc4d66e6f (diff)
Implement a real write barrier
Implement a Steele write barrier for our objects. The barrier is interesting
as it can also be used for incremental GC runs by simply turning the barrier
on and leaving old objects marked as black.

Change-Id: I0b273974d94a990dee3cd9298089b8b202c75bf2
Reviewed-by: Simon Hausmann <simon.hausmann@qt.io>
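For context, a Steele-style barrier reacts to a black (fully scanned) object
being mutated to point at a white (unscanned) object by reverting the source
object to grey so the marker revisits it. The sketch below is only a minimal
illustration of that idea, assuming hypothetical Cell, Color and GC types; it
is not the engine's actual code.

#include <vector>

enum class Color { White, Grey, Black };

struct Cell {
    Color color = Color::White;
    Cell *field = nullptr;          // single pointer slot, for brevity
};

struct GC {
    bool barrierActive = false;     // switched on while marking is in progress
    std::vector<Cell *> markStack;  // grey objects still to be scanned

    // Steele barrier: if a black object is about to reference a white one,
    // push the *source* back to grey so it gets re-scanned. Leaving the
    // barrier switched on between mark slices is what lets old objects stay
    // marked black across incremental GC runs, as the commit message notes.
    void writeBarrier(Cell *source, Cell *newValue) {
        if (barrierActive
                && source->color == Color::Black
                && newValue && newValue->color == Color::White) {
            source->color = Color::Grey;
            markStack.push_back(source);
        }
    }

    // Every store of a heap pointer goes through the barrier.
    void storeField(Cell *source, Cell *newValue) {
        writeBarrier(source, newValue);
        source->field = newValue;
    }
};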
Diffstat (limited to 'src/qml/memory/qv4mm_p.h')
-rw-r--r--  src/qml/memory/qv4mm_p.h  13
1 file changed, 7 insertions(+), 6 deletions(-)
diff --git a/src/qml/memory/qv4mm_p.h b/src/qml/memory/qv4mm_p.h
index 6e9303acb6..3e542b0aa3 100644
--- a/src/qml/memory/qv4mm_p.h
+++ b/src/qml/memory/qv4mm_p.h
@@ -80,27 +80,28 @@ struct StackAllocator {
     StackAllocator(ChunkAllocator *chunkAlloc);
     T *allocate() {
-        T *m = nextFree->as<T>();
+        HeapItem *m = nextFree;
         if (Q_UNLIKELY(nextFree == lastInChunk)) {
             nextChunk();
         } else {
             nextFree += requiredSlots;
         }
-#if MM_DEBUG
+#if !defined(QT_NO_DEBUG) || defined(MM_DEBUG)
         Chunk *c = m->chunk();
         Chunk::setBit(c->objectBitmap, m - c->realBase());
 #endif
-        return m;
+        return m->as<T>();
     }
     void free() {
-#if MM_DEBUG
-        Chunk::clearBit(item->chunk()->objectBitmap, item - item->chunk()->realBase());
-#endif
         if (Q_UNLIKELY(nextFree == firstInChunk)) {
             prevChunk();
         } else {
             nextFree -= requiredSlots;
         }
+#if !defined(QT_NO_DEBUG) || defined(MM_DEBUG)
+        Chunk *c = nextFree->chunk();
+        Chunk::clearBit(c->objectBitmap, nextFree - c->realBase());
+#endif
     }
     void nextChunk();