author    Lars Knoll <lars.knoll@qt.io>  2017-02-08 16:21:02 +0100
committer Lars Knoll <lars.knoll@qt.io>  2017-03-09 08:59:20 +0000
commit    2a554434a571dcefd26cf10ef8c5ae8b3b7d66db (patch)
tree      5532f0a0206fbbde0a3099ff1e0ee58188a97275 /src/qml/memory
parent    05de4e044f92dd278a00e410be8f070bc4d66e6f (diff)
Implement a real write barrier

Implement a Steele write barrier for our objects. The barrier is
interesting as it can also be used for incremental GC runs by simply
turning the barrier on and leaving old objects marked as black.

Change-Id: I0b273974d94a990dee3cd9298089b8b202c75bf2
Reviewed-by: Simon Hausmann <simon.hausmann@qt.io>
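For context: a Steele-style barrier intercepts every store into a heap object and re-marks the mutated source object gray, forcing the marker to rescan it. That is what keeps concurrent or incremental marking from missing a pointer written behind its back. A minimal sketch of the shape, with hypothetical GcObject/Engine types rather than the QV4 code in this patch:

// Minimal sketch of a Steele-style barrier (hypothetical types).
struct GcObject {
    bool gray = false;
    void setGrayBit() { gray = true; }   // marker must revisit this object
};

struct Engine {
    bool writeBarrierActive = false;     // only pay the cost during a GC cycle
};

inline void barrieredWrite(Engine *e, GcObject *source,
                           GcObject **slot, GcObject *value)
{
    *slot = value;                       // perform the store first
    if (e->writeBarrierActive)
        source->setGrayBit();            // then re-gray the mutated object
}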
Diffstat (limited to 'src/qml/memory')
-rw-r--r--  src/qml/memory/qv4heap_p.h            6
-rw-r--r--  src/qml/memory/qv4mm.cpp              2
-rw-r--r--  src/qml/memory/qv4mm_p.h             13
-rw-r--r--  src/qml/memory/qv4mmdefs_p.h         11
-rw-r--r--  src/qml/memory/qv4writebarrier_p.h  100
5 files changed, 114 insertions(+), 18 deletions(-)
diff --git a/src/qml/memory/qv4heap_p.h b/src/qml/memory/qv4heap_p.h
index bcd1af7705..7bedd705f9 100644
--- a/src/qml/memory/qv4heap_p.h
+++ b/src/qml/memory/qv4heap_p.h
@@ -113,6 +113,12 @@ struct Q_QML_EXPORT Base {
Q_ASSERT(!Chunk::testBit(c->extendsBitmap, h - c->realBase()));
return Chunk::setBit(c->blackBitmap, h - c->realBase());
}
+ inline void setGrayBit() {
+ const HeapItem *h = reinterpret_cast<const HeapItem *>(this);
+ Chunk *c = h->chunk();
+ Q_ASSERT(!Chunk::testBit(c->extendsBitmap, h - c->realBase()));
+ return Chunk::setBit(c->grayBitmap, h - c->realBase());
+ }
inline bool inUse() const {
const HeapItem *h = reinterpret_cast<const HeapItem *>(this);
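The new setGrayBit follows the same pattern as the existing black-bit setter: the item's slot index within its Chunk (h - c->realBase()) selects one bit in a per-chunk gray bitmap. A sketch of the word/bit addressing, modeled on QV4's Chunk::setBit and assuming a 64-bit build (constants here are illustrative):

#include <cstddef>
#include <cstdint>

enum : size_t { Bits = sizeof(uintptr_t) * 8, BitShift = 6 }; // 64-bit words
static_assert((size_t(1) << BitShift) == Bits, "BitShift must match word size");

inline void setBit(uintptr_t *bitmap, size_t index)
{
    bitmap += index >> BitShift;                      // select the bitmap word
    *bitmap |= uintptr_t(1) << (index & (Bits - 1));  // set the bit within it
}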
diff --git a/src/qml/memory/qv4mm.cpp b/src/qml/memory/qv4mm.cpp
index f42d509942..8f4f2f4aa8 100644
--- a/src/qml/memory/qv4mm.cpp
+++ b/src/qml/memory/qv4mm.cpp
@@ -251,7 +251,9 @@ void Chunk::sweep()
// DEBUG << "sweeping chunk" << this << (*freeList);
HeapItem *o = realBase();
for (uint i = 0; i < Chunk::EntriesInBitmap; ++i) {
+#if WRITEBARRIER(none)
Q_ASSERT((grayBitmap[i] | blackBitmap[i]) == blackBitmap[i]); // check that we don't have gray only objects
+#endif
quintptr toFree = objectBitmap[i] ^ blackBitmap[i];
Q_ASSERT((toFree & objectBitmap[i]) == toFree); // check all black objects are marked as being used
quintptr e = extendsBitmap[i];
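The newly guarded assertion encodes the invariant "gray is a subset of black", which only holds when no barrier runs: with the Steele barrier active, a store can turn an already-scanned object gray again, so the check is compiled out unless WRITEBARRIER(none). A worked example of the sweep's bitmap algebra on one small word (illustrative 8-bit values; QV4 uses pointer-sized words):

#include <cassert>
#include <cstdint>

int main()
{
    uint8_t objectBitmap = 0b10110110; // slots holding object headers
    uint8_t blackBitmap  = 0b10010010; // slots marked live this cycle

    // white = allocated but unmarked; exactly these slots get freed
    uint8_t toFree = objectBitmap ^ blackBitmap;   // 0b00100100
    assert((toFree & objectBitmap) == toFree);     // black is a subset of object

    // without a barrier, no slot may be gray-only:
    uint8_t grayBitmap = 0b00010010;               // gray is a subset of black
    assert((grayBitmap | blackBitmap) == blackBitmap);
}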
diff --git a/src/qml/memory/qv4mm_p.h b/src/qml/memory/qv4mm_p.h
index 6e9303acb6..3e542b0aa3 100644
--- a/src/qml/memory/qv4mm_p.h
+++ b/src/qml/memory/qv4mm_p.h
@@ -80,27 +80,28 @@ struct StackAllocator {
StackAllocator(ChunkAllocator *chunkAlloc);
T *allocate() {
- T *m = nextFree->as<T>();
+ HeapItem *m = nextFree;
if (Q_UNLIKELY(nextFree == lastInChunk)) {
nextChunk();
} else {
nextFree += requiredSlots;
}
-#if MM_DEBUG
+#if !defined(QT_NO_DEBUG) || defined(MM_DEBUG)
Chunk *c = m->chunk();
Chunk::setBit(c->objectBitmap, m - c->realBase());
#endif
- return m;
+ return m->as<T>();
}
void free() {
-#if MM_DEBUG
- Chunk::clearBit(item->chunk()->objectBitmap, item - item->chunk()->realBase());
-#endif
if (Q_UNLIKELY(nextFree == firstInChunk)) {
prevChunk();
} else {
nextFree -= requiredSlots;
}
+#if !defined(QT_NO_DEBUG) || defined(MM_DEBUG)
+ Chunk *c = nextFree->chunk();
+ Chunk::clearBit(c->objectBitmap, nextFree - c->realBase());
+#endif
}
void nextChunk();
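Two fixes land in this hunk: the removed free() lines referenced an `item` that is not in scope (free() takes no arguments, per the unchanged signature above), so MM_DEBUG builds could not have compiled, and allocate() now converts to T* only after the bitmap bookkeeping. The bookkeeping itself is also enabled in all non-QT_NO_DEBUG builds. The allocator is a plain LIFO bump allocator over fixed-size slots; a simplified sketch with a hypothetical BumpStack (chunk switching elided):

#include <cstddef>

template <typename T, size_t SlotSize>
struct BumpStack {
    char *nextFree;
    static constexpr size_t requiredSlots = (sizeof(T) + SlotSize - 1) / SlotSize;

    T *allocate()
    {
        char *m = nextFree;
        nextFree += requiredSlots * SlotSize;  // bump; real code may switch chunks
        return reinterpret_cast<T *>(m);       // convert only after bookkeeping
    }

    void free()
    {
        nextFree -= requiredSlots * SlotSize;  // pop the most recent allocation
        // debug builds clear the object-bitmap bit for nextFree here,
        // i.e. for the slot that was just freed
    }
};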
diff --git a/src/qml/memory/qv4mmdefs_p.h b/src/qml/memory/qv4mmdefs_p.h
index 3f65e97d86..987e669040 100644
--- a/src/qml/memory/qv4mmdefs_p.h
+++ b/src/qml/memory/qv4mmdefs_p.h
@@ -255,6 +255,17 @@ Q_STATIC_ASSERT(sizeof(HeapItem) == Chunk::SlotSize);
Q_STATIC_ASSERT(QT_POINTER_SIZE*8 == Chunk::Bits);
Q_STATIC_ASSERT((1 << Chunk::BitShift) == Chunk::Bits);
+// Base class for the execution engine
+
+struct EngineBase {
+ Heap::ExecutionContext *current = 0;
+
+ Value *jsStackTop = 0;
+ quint8 hasException = false;
+ quint8 writeBarrierActive = false;
+ quint16 unused = 0;
+};
+
// Some helper classes and macros to automate the generation of our
// tables used for marking objects
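EngineBase hoists exactly the state the barrier's fast path needs into a small base class: writeBarrierActive sits as a single byte next to hasException, with an explicit quint16 pad so the two flags fill the 32-bit word following the two pointers. An illustrative mirror with a layout check (EngineBaseMirror is a stand-in, not the real class):

#include <cstddef>
#include <cstdint>

struct EngineBaseMirror {
    void *current = nullptr;
    void *jsStackTop = nullptr;
    uint8_t hasException = false;
    uint8_t writeBarrierActive = false;
    uint16_t unused = 0;
};

static_assert(offsetof(EngineBaseMirror, writeBarrierActive)
                  == 2 * sizeof(void *) + 1,
              "both flags pack into the word following the pointers");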
diff --git a/src/qml/memory/qv4writebarrier_p.h b/src/qml/memory/qv4writebarrier_p.h
index c0f508f962..838ed7a456 100644
--- a/src/qml/memory/qv4writebarrier_p.h
+++ b/src/qml/memory/qv4writebarrier_p.h
@@ -55,6 +55,11 @@
QT_BEGIN_NAMESPACE
+#define WRITEBARRIER_steele -1
+#define WRITEBARRIER_none 1
+
+#define WRITEBARRIER(x) (1/WRITEBARRIER_##x == 1)
+
namespace QV4 {
namespace WriteBarrier {
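The selection macro is a compile-time guard, not arithmetic for its own sake: 1/WRITEBARRIER_##x == 1 is true only for the variant defined as 1 (here `none`), false for variants defined as -1, and a misspelled or unknown name leaves an undefined identifier that evaluates to 0 inside #if, so the preprocessor reports a division by zero instead of silently selecting the wrong barrier. Roughly:

#define WRITEBARRIER_steele -1
#define WRITEBARRIER_none 1
#define WRITEBARRIER(x) (1/WRITEBARRIER_##x == 1)

#if WRITEBARRIER(none)    // selected: 1/1 == 1
#endif
#if WRITEBARRIER(steele)  // not selected: 1/-1 is -1, which is != 1
#endif
// #if WRITEBARRIER(misspelled)  -> WRITEBARRIER_misspelled is undefined,
// evaluates as 0 in #if, and the division by zero is a hard error.

Enabling the Steele barrier thus means swapping the two values (steele = 1, none = -1), which flips the #if/#elif chain below.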
@@ -64,20 +69,79 @@ enum Type {
Barrier
};
-inline void write(QV4::ExecutionEngine *engine, QV4::Heap::Base *base, QV4::Value *slot, QV4::Value value)
+enum NewValueType {
+ Primitive,
+ Object,
+ Unknown
+};
+
+// ### this needs to be filled with a real memory fence once marking is concurrent
+Q_ALWAYS_INLINE void fence() {}
+
+#if WRITEBARRIER(steele)
+
+template <NewValueType type>
+static Q_CONSTEXPR inline bool isRequired() {
+ return type != Primitive;
+}
+
+inline void write(EngineBase *engine, Heap::Base *base, Value *slot, Value value)
+{
+ *slot = value;
+ if (engine->writeBarrierActive && isRequired<Unknown>()) {
+ fence();
+ base->setGrayBit();
+ }
+}
+
+inline void write(EngineBase *engine, Heap::Base *base, Value *slot, Heap::Base *value)
+{
+ *slot = value;
+ if (engine->writeBarrierActive && isRequired<Object>()) {
+ fence();
+ base->setGrayBit();
+ }
+}
+
+inline void write(EngineBase *engine, Heap::Base *base, Heap::Base **slot, Heap::Base *value)
+{
+ *slot = value;
+ if (engine->writeBarrierActive) {
+ fence();
+ base->setGrayBit();
+ }
+}
+
+#elif WRITEBARRIER(none)
+
+template <NewValueType type>
+static Q_CONSTEXPR inline bool isRequired() {
+ return false;
+}
+
+inline void write(EngineBase *engine, Heap::Base *base, Value *slot, Value value)
+{
+ Q_UNUSED(engine);
+ Q_UNUSED(base);
+ *slot = value;
+}
+
+inline void write(EngineBase *engine, Heap::Base *base, Value *slot, Heap::Base *value)
{
Q_UNUSED(engine);
Q_UNUSED(base);
*slot = value;
}
-inline void write(QV4::ExecutionEngine *engine, QV4::Heap::Base *base, QV4::Heap::Base **slot, QV4::Heap::Base *value)
+inline void write(EngineBase *engine, Heap::Base *base, Heap::Base **slot, Heap::Base *value)
{
Q_UNUSED(engine);
Q_UNUSED(base);
*slot = value;
}
+#endif
+
}
namespace Heap {
@@ -88,9 +152,14 @@ struct Pointer {
T operator->() const { return ptr; }
operator T () const { return ptr; }
+ Heap::Base *base() {
+ Heap::Base *base = reinterpret_cast<Heap::Base *>(this) - (offset/sizeof(Heap::Base));
+ Q_ASSERT(base->inUse());
+ return base;
+ }
+
void set(ExecutionEngine *e, T newVal) {
- Q_UNUSED(e);
- ptr = newVal;
+ WriteBarrier::write(e, base(), reinterpret_cast<Heap::Base **>(&ptr), reinterpret_cast<Heap::Base *>(newVal));
}
template <typename Type>
@@ -106,9 +175,14 @@ V4_ASSERT_IS_TRIVIAL(V4PointerCheck)
template <size_t offset>
struct HeapValue : Value {
+ Heap::Base *base() {
+ Heap::Base *base = reinterpret_cast<Heap::Base *>(this) - (offset/sizeof(Heap::Base));
+ Q_ASSERT(base->inUse());
+ return base;
+ }
+
void set(ExecutionEngine *e, const Value &newVal) {
- Q_UNUSED(e);
- setRawValue(newVal.rawValue());
+ WriteBarrier::write(e, base(), this, newVal);
}
};
@@ -118,15 +192,17 @@ struct ValueArray {
uint alloc;
Value values[1];
+ Heap::Base *base() {
+ Heap::Base *base = reinterpret_cast<Heap::Base *>(this) - (offset/sizeof(Heap::Base));
+ Q_ASSERT(base->inUse());
+ return base;
+ }
+
void set(ExecutionEngine *e, uint index, Value v) {
- Q_UNUSED(e);
- Q_ASSERT(index < alloc);
- values[index] = v;
+ WriteBarrier::write(e, base(), values + index, v);
}
void set(ExecutionEngine *e, uint index, Heap::Base *b) {
- Q_UNUSED(e);
- Q_ASSERT(index < alloc);
- values[index] = b;
+ WriteBarrier::write(e, base(), values + index, b);
}
inline const Value &operator[] (uint index) const {
Q_ASSERT(index < alloc);
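Pointer, HeapValue and ValueArray all recover their owning Heap::Base the same way: the member's byte offset inside the heap object is a template parameter, so base() is pure pointer arithmetic and no back-pointer has to be stored per slot. The real code subtracts in sizeof(Heap::Base) units; the sketch below uses plain byte arithmetic instead, with hypothetical Header/Member/Owner names:

#include <cassert>
#include <cstddef>

struct Header {
    int tag = 0;
    bool inUse() const { return true; }
};

template <size_t offset>
struct Member {
    Header *base()
    {
        // 'this' sits exactly 'offset' bytes past the owning header, so we
        // can walk back to it without storing an extra pointer per member.
        Header *h = reinterpret_cast<Header *>(
            reinterpret_cast<char *>(this) - offset);
        assert(h->inUse());
        return h;
    }
    int payload = 0;
};

struct Owner {
    Header header;                  // at offset 0
    Member<sizeof(Header)> member;  // knows its own offset at compile time
};

static_assert(offsetof(Owner, member) == sizeof(Header),
              "offset template parameter must match the actual layout");

int main()
{
    Owner o;
    assert(o.member.base() == &o.header);
}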