summary | refs | log | tree | commit | diff | stats
path: root/src/corelib/tools
diff options
context:
space:
mode:
author: Lars Knoll <lars.knoll@qt.io> 2020-10-28 21:35:33 +0100
committer: Lars Knoll <lars.knoll@qt.io> 2020-11-04 11:21:22 +0100
commitb99271caa6231ad753bc796dae5202ebc1cb9440 (patch)
treeed1d59d157ff92a774060f023547b0e654cfbebc /src/corelib/tools
parente5f80c1b0310412993e781691f39aa2b25e404fb (diff)
Avoid expensive iterator calculations in append()
Avoid moving data inside the array to create free space at one end. This is a performance bottleneck, as it required quite a lot of calculations for every insert. Rather reallocate and grow in this case, so we only need to do expensive work when we reallocate the array.

Change-Id: Ifc955fbcf9967c3b66aa2600e0627aac15f0c917
Reviewed-by: Thiago Macieira <thiago.macieira@intel.com>
Reviewed-by: Andrei Golubev <andrei.golubev@qt.io>
Diffstat (limited to 'src/corelib/tools')
-rw-r--r--  src/corelib/tools/qarraydataops.h     | 60
-rw-r--r--  src/corelib/tools/qarraydatapointer.h | 31
-rw-r--r--  src/corelib/tools/qlist.h             | 66
3 files changed, 91 insertions(+), 66 deletions(-)
diff --git a/src/corelib/tools/qarraydataops.h b/src/corelib/tools/qarraydataops.h
index 3b95d04fa8..507f8f153f 100644
--- a/src/corelib/tools/qarraydataops.h
+++ b/src/corelib/tools/qarraydataops.h
@@ -1035,6 +1035,7 @@ template <class T>
struct QCommonArrayOps : QArrayOpsSelector<T>::Type
{
using Base = typename QArrayOpsSelector<T>::Type;
+ using Data = QTypedArrayData<T>;
using parameter_type = typename Base::parameter_type;
using iterator = typename Base::iterator;
using const_iterator = typename Base::const_iterator;
@@ -1239,13 +1240,6 @@ protected:
}
}
- size_t moveSizeForAppend(size_t)
- {
- // Qt5 QList in append: make 100% free space at end if not enough space
- // Now:
- return this->freeSpaceAtBegin();
- }
-
size_t moveSizeForPrepend(size_t required)
{
// Qt5 QList in prepend: make 33% of all space at front if not enough space
@@ -1255,21 +1249,11 @@ protected:
return qMin(space, this->freeSpaceAtEnd());
}
- // Helper functions that reduce usage boilerplate
- void prepareSpaceForAppend(size_t required)
- {
- prepareFreeSpace(GrowsForwardTag{}, required, moveSizeForAppend(required));
- }
void prepareSpaceForPrepend(size_t required)
{
prepareFreeSpace(GrowsBackwardsTag{}, required, moveSizeForPrepend(required));
}
template<typename It>
- void prepareSpaceForAppend(It &b, It &e, size_t required)
- {
- prepareFreeSpace(GrowsForwardTag{}, required, moveSizeForAppend(required), b, e);
- }
- template<typename It>
void prepareSpaceForPrepend(It &b, It &e, size_t required)
{
prepareFreeSpace(GrowsBackwardsTag{}, required, moveSizeForPrepend(required), b, e);
@@ -1304,6 +1288,28 @@ protected:
}
public:
+
+ // does the iterator point into this array?
+ template <typename It>
+ bool iteratorPointsIntoArray(const It &it)
+ {
+ using DecayedIt = std::decay_t<It>;
+ using RemovedConstVolatileIt = std::remove_cv_t<It>;
+ constexpr bool selfIterator =
+ // if passed type is an iterator type:
+ std::is_same_v<DecayedIt, iterator>
+ || std::is_same_v<DecayedIt, const_iterator>
+ // if passed type is a pointer type:
+ || std::is_same_v<RemovedConstVolatileIt, T *>
+ || std::is_same_v<RemovedConstVolatileIt, const T *>
+ || std::is_same_v<RemovedConstVolatileIt, const volatile T *>;
+ if constexpr (selfIterator) {
+ return (it >= this->begin() && it <= this->end());
+ } else {
+ return false;
+ }
+ }
+
// Returns whether reallocation is desirable before adding more elements
// into the container. This is a helper function that one can use to
// theoretically improve average operations performance. Ignoring this
@@ -1358,11 +1364,11 @@ public:
Q_ASSERT(this->isMutable() || b == e);
Q_ASSERT(!this->isShared() || b == e);
Q_ASSERT(b <= e);
- Q_ASSERT((e - b) <= this->allocatedCapacity() - this->size);
+ Q_ASSERT((e - b) <= this->freeSpaceAtEnd());
+
if (b == e) // short-cut and handling the case b and e == nullptr
return;
- prepareSpaceForAppend(b, e, e - b); // ### perf. loss
Base::insert(GrowsForwardTag{}, this->end(), b, e);
}
@@ -1373,11 +1379,10 @@ public:
{
Q_ASSERT(this->isMutable() || b == e);
Q_ASSERT(!this->isShared() || b == e);
+
const qsizetype distance = std::distance(b, e);
Q_ASSERT(distance >= 0 && distance <= this->allocatedCapacity() - this->size);
- prepareSpaceForAppend(b, e, distance); // ### perf. loss
-
T *iter = this->end();
for (; b != e; ++iter, ++b) {
new (iter) T(*b);
@@ -1391,10 +1396,10 @@ public:
Q_ASSERT(!this->isShared() || b == e);
Q_ASSERT(b <= e);
Q_ASSERT((e - b) <= this->allocatedCapacity() - this->size);
+
if (b == e) // short-cut and handling the case b and e == nullptr
return;
- prepareSpaceForAppend(b, e, e - b); // ### perf. loss
Base::moveAppend(b, e);
}
@@ -1403,10 +1408,7 @@ public:
Q_ASSERT(!this->isShared() || n == 0);
Q_ASSERT(size_t(this->allocatedCapacity() - this->size) >= n);
- // Preserve the value, because it might be a reference to some part of the moved chunk
- T tmp(t);
- prepareSpaceForAppend(n); // ### perf. loss
- Base::insert(GrowsForwardTag{}, this->end(), n, tmp);
+ Base::insert(GrowsForwardTag{}, this->end(), n, t);
}
void insert(T *where, const T *b, const T *e)
@@ -1439,11 +1441,11 @@ public:
Base::insert(GrowsForwardTag{}, where, b + k, e);
}
- void insert(T *where, size_t n, parameter_type t)
+ void insert(T *where, qsizetype n, parameter_type t)
{
Q_ASSERT(!this->isShared() || (n == 0 && where == this->end()));
Q_ASSERT(where >= this->begin() && where <= this->end());
- Q_ASSERT(size_t(this->allocatedCapacity() - this->size) >= n);
+ Q_ASSERT(this->allocatedCapacity() - this->size >= n);
if (this->size > 0 && where == this->begin()) { // prepend case - special space arrangement
// Preserve the value, because it might be a reference to some part of the moved chunk
@@ -1451,7 +1453,7 @@ public:
prepareSpaceForPrepend(n); // ### perf. loss
Base::insert(GrowsBackwardsTag{}, this->begin(), n, tmp);
return;
- } else if (where == this->end()) { // append case - special space arrangement
+ } else if (where == this->end() && n <= this->freeSpaceAtEnd()) { // append case - special space arrangement
copyAppend(n, t);
return;
}
diff --git a/src/corelib/tools/qarraydatapointer.h b/src/corelib/tools/qarraydatapointer.h
index 3cabeca649..398dc1a607 100644
--- a/src/corelib/tools/qarraydatapointer.h
+++ b/src/corelib/tools/qarraydatapointer.h
@@ -235,6 +235,37 @@ public:
return QArrayDataPointer(header, dataPtr);
}
+ enum AllocationPosition {
+ AllocateAtEnd,
+ AllocateAtBeginning
+ };
+ // allocate and grow. Ensure that at the minimum requiredSpace is available at the requested end
+ static QArrayDataPointer allocateGrow(const QArrayDataPointer &from, qsizetype n, AllocationPosition position)
+ {
+ // calculate new capacity. We keep the free capacity at the side that does not have to grow
+ // to avoid quadratic behavior with mixed append/prepend cases
+
+ // use qMax below, because constAllocatedCapacity() can be 0 when using fromRawData()
+ qsizetype minimalCapacity = qMax(from.size, from.constAllocatedCapacity()) + n;
+ // subtract the free space at the side we want to allocate. This ensures that the total size requested is
+ // the existing allocation at the other side + size + n.
+ minimalCapacity -= (position == AllocateAtEnd) ? from.freeSpaceAtEnd() : from.freeSpaceAtBegin();
+ qsizetype capacity = from.detachCapacity(minimalCapacity);
+ const bool grows = capacity > from.constAllocatedCapacity();
+ auto [header, dataPtr] = Data::allocate(capacity, grows ? QArrayData::GrowsBackwards : QArrayData::DefaultAllocationFlags);
+ const bool valid = header != nullptr && dataPtr != nullptr;
+ if (!valid)
+ return QArrayDataPointer(header, dataPtr);
+
+ // Idea: * when growing backwards, adjust pointer to prepare free space at the beginning
+ // * when growing forward, adjust by the previous data pointer offset
+
+ // TODO: what's with CapacityReserved?
+ dataPtr += (position == AllocateAtBeginning) ? qMax(0, (header->alloc - from.size - n) / 2)
+ : from.freeSpaceAtBegin();
+ return QArrayDataPointer(header, dataPtr);
+ }
+
friend bool operator==(const QArrayDataPointer &lhs, const QArrayDataPointer &rhs) noexcept
{
return lhs.data() == rhs.data() && lhs.size == rhs.size;
diff --git a/src/corelib/tools/qlist.h b/src/corelib/tools/qlist.h
index 6fdc5b19c4..e278b70b2d 100644
--- a/src/corelib/tools/qlist.h
+++ b/src/corelib/tools/qlist.h
@@ -314,7 +314,12 @@ public:
{ append(const_iterator(std::addressof(t)), const_iterator(std::addressof(t)) + 1); }
void append(const_iterator i1, const_iterator i2);
void append(rvalue_ref t) { emplaceBack(std::move(t)); }
- void append(const QList<T> &l) { append(l.constBegin(), l.constEnd()); }
+ void append(const QList<T> &l)
+ {
+ // protect against l == *this
+ QList list(l);
+ append(list.constBegin(), list.constEnd());
+ }
void append(QList<T> &&l);
void prepend(rvalue_ref t) { emplaceFront(std::move(t)); }
void prepend(parameter_type t) { emplaceFront(t); }
@@ -530,7 +535,7 @@ public:
void shrink_to_fit() { squeeze(); }
// comfort
- QList<T> &operator+=(const QList<T> &l) { append(l.cbegin(), l.cend()); return *this; }
+ QList<T> &operator+=(const QList<T> &l) { append(l); return *this; }
QList<T> &operator+=(QList<T> &&l) { append(std::move(l)); return *this; }
inline QList<T> operator+(const QList<T> &l) const
{ QList n = *this; n += l; return n; }
@@ -666,11 +671,8 @@ inline void QList<T>::append(const_iterator i1, const_iterator i2)
if (i1 == i2)
return;
const auto distance = std::distance(i1, i2);
- const auto newSize = size() + distance;
- const bool shouldGrow = d->shouldGrowBeforeInsert(d.end(), qsizetype(distance));
- if (d->needsDetach() || newSize > d->allocatedCapacity() || shouldGrow) {
- DataPointer detached(DataPointer::allocateGrow(d, newSize,
- d->detachFlags() | Data::GrowsForward));
+ if (d->needsDetach() || distance > d.freeSpaceAtEnd()) {
+ DataPointer detached(DataPointer::allocateGrow(d, distance, DataPointer::AllocateAtEnd));
detached->copyAppend(constBegin(), constEnd());
detached->copyAppend(i1, i2);
d.swap(detached);
@@ -688,11 +690,8 @@ inline void QList<T>::append(QList<T> &&other)
if (other.d->needsDetach() || !std::is_nothrow_move_constructible_v<T>)
return append(other);
- const auto newSize = size() + other.size();
- const bool shouldGrow = d->shouldGrowBeforeInsert(d.end(), other.size());
- if (d->needsDetach() || newSize > d->allocatedCapacity() || shouldGrow) {
- DataPointer detached(DataPointer::allocateGrow(d, newSize,
- d->detachFlags() | Data::GrowsForward));
+ if (d->needsDetach() || other.size() > d.freeSpaceAtEnd()) {
+ DataPointer detached(DataPointer::allocateGrow(d, other.size(), DataPointer::AllocateAtEnd));
if (!d->needsDetach())
detached->moveAppend(begin(), end());
@@ -711,17 +710,14 @@ template<typename T>
template<typename... Args>
inline typename QList<T>::reference QList<T>::emplaceFront(Args &&... args)
{
- const bool shouldGrow = d->shouldGrowBeforeInsert(d.begin(), 1);
- const auto newSize = size() + 1;
- if (d->needsDetach() || newSize > d->constAllocatedCapacity() || shouldGrow) {
- const auto flags = d->detachFlags() | Data::GrowsBackwards;
- DataPointer detached(DataPointer::allocateGrow(d, newSize, flags));
+ if (d->needsDetach() || !d.freeSpaceAtBegin()) {
+ DataPointer detached(DataPointer::allocateGrow(d, 1, DataPointer::AllocateAtBeginning));
- T tmp(std::forward<Args>(args)...);
- detached->copyAppend(constBegin(), constEnd());
- // insert here makes sure we have extra free space at beginning. we
- // actually need a proper copyPrepend here instead.
- detached->insert(detached.begin(), 1, std::move(tmp));
+ detached->emplace(detached.begin(), std::forward<Args>(args)...);
+ if (!d.needsDetach())
+ detached->moveAppend(d.begin(), d.end());
+ else
+ detached->copyAppend(constBegin(), constEnd());
d.swap(detached);
} else {
// ### replace with emplaceFront
@@ -740,14 +736,12 @@ QList<T>::insert(qsizetype i, qsizetype n, parameter_type t)
// we don't have a quick exit for n == 0
// it's not worth wasting CPU cycles for that
- const auto newSize = size() + n;
- const bool shouldGrow = d->shouldGrowBeforeInsert(d.begin() + i, n);
- if (d->needsDetach() || newSize > d->allocatedCapacity() || shouldGrow) {
- typename Data::ArrayOptions flags = d->detachFlags() | Data::GrowsForward;
- if (d.size != 0 && i <= d.size / 4)
- flags |= Data::GrowsBackwards;
+ if (d->needsDetach() || (n > d.freeSpaceAtBegin() && n > d.freeSpaceAtEnd())) {
+ typename DataPointer::AllocationPosition pos = DataPointer::AllocateAtEnd;
+ if (d.size != 0 && i <= (d.size >> 1))
+ pos = DataPointer::AllocateAtBeginning;
- DataPointer detached(DataPointer::allocateGrow(d, newSize, flags));
+ DataPointer detached(DataPointer::allocateGrow(d, n, pos));
const_iterator where = constBegin() + i;
detached->copyAppend(constBegin(), where);
detached->copyAppend(n, t);
@@ -755,7 +749,7 @@ QList<T>::insert(qsizetype i, qsizetype n, parameter_type t)
d.swap(detached);
} else {
// we're detached and we can just move data around
- if (i == size()) {
+ if (i == size() && n <= d.freeSpaceAtEnd()) {
d->copyAppend(n, t);
} else {
T copy(t);
@@ -772,14 +766,12 @@ QList<T>::emplace(qsizetype i, Args&&... args)
{
Q_ASSERT_X(i >= 0 && i <= d->size, "QList<T>::insert", "index out of range");
- const bool shouldGrow = d->shouldGrowBeforeInsert(d.begin() + i, 1);
- const auto newSize = size() + 1;
- if (d->needsDetach() || newSize > d->allocatedCapacity() || shouldGrow) {
- typename Data::ArrayOptions flags = d->detachFlags() | Data::GrowsForward;
- if (d.size != 0 && i <= d.size / 4)
- flags |= Data::GrowsBackwards;
+ if (d->needsDetach() || (d.size == d.constAllocatedCapacity())) {
+ typename DataPointer::AllocationPosition pos = DataPointer::AllocateAtEnd;
+ if (d.size != 0 && i <= (d.size >> 1))
+ pos = DataPointer::AllocateAtBeginning;
- DataPointer detached(DataPointer::allocateGrow(d, newSize, flags));
+ DataPointer detached(DataPointer::allocateGrow(d, 1, pos));
const_iterator where = constBegin() + i;
// Create an element here to handle cases when a user moves the element
// from a container to the same container. This is a critical step for