author     Qt Forward Merge Bot <qt_forward_merge_bot@qt-project.org>  2020-05-14 14:01:32 +0200
committer  Qt Forward Merge Bot <qt_forward_merge_bot@qt-project.org>  2020-05-14 14:01:32 +0200
commit     76fe36645eb41fa35e526a97874f1e15a39c3823 (patch)
tree       aea94a0ee4fbd3e7187666999d3b9b825c7a43af /tests
parent     d68f3a81c9073c64e4492dfca33eeafccb460c6f (diff)
parent     ba3b53cb501a77144aa6259e48a8e0edc3d1481d (diff)

Merge remote-tracking branch 'origin/5.15.0' into 5.15
Diffstat (limited to 'tests')
-rw-r--r--  tests/auto/corelib/serialization/cborlargedatavalidation.cpp    | 20
-rw-r--r--  tests/auto/corelib/serialization/qcborvalue/tst_qcborvalue.cpp  | 15

2 files changed, 30 insertions(+), 5 deletions(-)
diff --git a/tests/auto/corelib/serialization/cborlargedatavalidation.cpp b/tests/auto/corelib/serialization/cborlargedatavalidation.cpp
index 9abfe0f575..f3b6893957 100644
--- a/tests/auto/corelib/serialization/cborlargedatavalidation.cpp
+++ b/tests/auto/corelib/serialization/cborlargedatavalidation.cpp
@@ -81,19 +81,31 @@ qint64 LargeIODevice::readData(char *data, qint64 maxlen)
 void addValidationLargeData(qsizetype minInvalid, qsizetype maxInvalid)
 {
-    char toolong[2 + sizeof(qsizetype)] = { char(0x81) };
+    char toolong[1 + sizeof(qsizetype)];
     for (qsizetype v = maxInvalid; v >= minInvalid; --v) {
         // 0x5a for 32-bit, 0x5b for 64-bit
-        toolong[1] = sizeof(v) > 4 ? 0x5b : 0x5a;
-        qToBigEndian(v, toolong + 2);
+        toolong[0] = sizeof(v) > 4 ? 0x5b : 0x5a;
+        qToBigEndian(v, toolong + 1);
         QTest::addRow("bytearray-too-big-for-qbytearray-%llx", v)
                 << QByteArray(toolong, sizeof(toolong)) << 0 << CborErrorDataTooLarge;
-        toolong[1] |= 0x20;
+        QTest::addRow("bytearray-chunked-too-big-for-qbytearray-%llx", v)
+                << ('\x5f' + QByteArray(toolong, sizeof(toolong)) + '\xff')
+                << 0 << CborErrorDataTooLarge;
+        QTest::addRow("bytearray-2chunked-too-big-for-qbytearray-%llx", v)
+                << ("\x5f\x40" + QByteArray(toolong, sizeof(toolong)) + '\xff')
+                << 0 << CborErrorDataTooLarge;
+        toolong[0] |= 0x20;
         // QCborStreamReader::readString copies to a QByteArray first
         QTest::addRow("string-too-big-for-qbytearray-%llx", v)
                 << QByteArray(toolong, sizeof(toolong)) << 0 << CborErrorDataTooLarge;
+        QTest::addRow("string-chunked-too-big-for-qbytearray-%llx", v)
+                << ('\x7f' + QByteArray(toolong, sizeof(toolong)) + '\xff')
+                << 0 << CborErrorDataTooLarge;
+        QTest::addRow("string-2chunked-too-big-for-qbytearray-%llx", v)
+                << ("\x7f\x60" + QByteArray(toolong, sizeof(toolong)) + '\xff')
+                << 0 << CborErrorDataTooLarge;
     }
 }
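
The rows above exercise CBOR's indefinite-length (chunked) strings: 0x5f opens a
chunked byte string and 0x7f a chunked text string, each chunk carries its own
length header (0x5b/0x7b for an 8-byte big-endian length), and 0xff is the
"break" that terminates the sequence. The following standalone sketch, not part
of the patch, assembles one such payload with an illustrative declared length
and feeds it to QCborStreamReader, which is expected to fail from the size
check rather than attempt the allocation:

    #include <QtCore/QCborStreamReader>
    #include <QtCore/QDebug>
    #include <QtCore/QtEndian>
    #include <limits>

    int main()
    {
        // Declare one chunk larger than any QByteArray can hold; this mirrors
        // the tests' MaxInvalid and is illustrative, not part of the patch.
        const quint64 declared =
                quint64(std::numeric_limits<QByteArray::size_type>::max());

        char lenBytes[sizeof(quint64)];
        qToBigEndian(declared, lenBytes);

        QByteArray payload;
        payload += '\x5f';                                 // start chunked byte string
        payload += '\x5b';                                 // chunk: 8-byte length follows
        payload += QByteArray(lenBytes, sizeof(lenBytes)); // declared length, no data
        payload += '\xff';                                 // break: end of chunks

        QCborStreamReader reader(payload);
        const auto result = reader.readByteArray();        // should fail before allocating
        if (result.status == QCborStreamReader::Error)
            qDebug() << reader.lastError().toString();     // expect QCborError::DataTooLarge
        return 0;
    }
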
diff --git a/tests/auto/corelib/serialization/qcborvalue/tst_qcborvalue.cpp b/tests/auto/corelib/serialization/qcborvalue/tst_qcborvalue.cpp
index 9c1341e252..1379cc348d 100644
--- a/tests/auto/corelib/serialization/qcborvalue/tst_qcborvalue.cpp
+++ b/tests/auto/corelib/serialization/qcborvalue/tst_qcborvalue.cpp
@@ -1926,11 +1926,24 @@ void tst_QCborValue::validation_data()
     // Add QCborStreamReader-specific limitations due to use of QByteArray and
     // QString, which are allocated by QArrayData::allocate().
     const qsizetype MaxInvalid = std::numeric_limits<QByteArray::size_type>::max();
-    const qsizetype MinInvalid = MaxByteArraySize + 1;
+    const qsizetype MinInvalid = MaxByteArraySize + 1 - sizeof(QByteArray::size_type);
     addValidationColumns();
     addValidationData(MinInvalid);
     addValidationLargeData(MinInvalid, MaxInvalid);
 
+    // Chunked strings whose total overflows the limit, but each individual
+    // chunk doesn't. 0x5a for 32-bit, 0x5b for 64-bit.
+    char toolong[1 + sizeof(qsizetype)];
+    toolong[0] = sizeof(MinInvalid) > 4 ? 0x5b : 0x5a;
+    qToBigEndian(MinInvalid - 1, toolong + 1);
+    QTest::addRow("bytearray-2chunked+1-too-big-for-qbytearray-%llx", MinInvalid)
+            << ("\x5f\x41z" + QByteArray(toolong, sizeof(toolong)) + '\xff')
+            << 0 << CborErrorDataTooLarge;
+    toolong[0] |= 0x20;
+    QTest::addRow("string-2chunked+1-too-big-for-qbytearray-%llx", MinInvalid)
+            << ("\x7f\x61z" + QByteArray(toolong, sizeof(toolong)) + '\xff')
+            << 0 << CborErrorDataTooLarge;
+
     // These tests say we have arrays and maps with very large item counts.
     // They are meant to ensure we don't pre-allocate a lot of memory
     // unnecessarily and possibly crash the application. The actual number of
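
The two "2chunked+1" rows added here differ from those in
cborlargedatavalidation.cpp in one key way: each chunk fits a QByteArray on its
own, and only their combined size crosses into the invalid range. A byte-level
sketch of the same construction, assuming a 64-bit build (buildTwoChunked is a
hypothetical helper, not part of the patch):

    #include <QtCore/QByteArray>
    #include <QtCore/QtEndian>

    // Layout produced (64-bit):
    //   0x7f             start indefinite-length text string
    //   0x61 'z'         chunk 1: one-byte text string "z"
    //   0x7b <8 bytes>   chunk 2 header: big-endian length just below the limit
    //   0xff             break, ending the chunked string
    // Chunk 2 alone is representable; chunk 1's extra byte pushes the total
    // into the invalid range, so the reader must reject the string instead of
    // overflowing while concatenating chunks.
    static QByteArray buildTwoChunked(quint64 secondChunkLen)
    {
        char lenBytes[sizeof(quint64)];
        qToBigEndian(secondChunkLen, lenBytes);

        QByteArray payload("\x7f\x61z");                   // 0x7f, then chunk "z"
        payload += '\x7b';                                 // chunk 2: 8-byte length follows
        payload += QByteArray(lenBytes, sizeof(lenBytes)); // declared chunk 2 length
        payload += '\xff';                                 // break
        return payload;
    }

Calling buildTwoChunked(MinInvalid - 1) reproduces the text-string row above;
switching 0x7f/0x61/0x7b to 0x5f/0x41/0x5b gives the byte-string variant.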