QBenchlib: add support for a measurer reporting multiple results
Implemented for the Linux Perf measurer, with four measurements by default.

RESULT : tst_MyClass::QString_toInt(): 149.574444 CPU cycles per iteration (total: 149,574,445, iterations: 1000000)
RESULT : tst_MyClass::QString_toInt(): 620.000181 instructions per iteration (total: 620,000,182, iterations: 1000000)
RESULT : tst_MyClass::QString_toInt(): 131.000046 branch instructions per iteration (total: 131,000,047, iterations: 1000000)
RESULT : tst_MyClass::QString_toInt(): 32.118771 nsecs per iteration (total: 32,118,771, iterations: 1000000)

Change-Id: I3c79b7e08fa346988dfefffd17202cda3df8431b
Reviewed-by: Fabian Kosmale <fabian.kosmale@qt.io>
parent b5b00e7790
commit 4731baf6d3
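
For context (this is not part of the patch): a QtTest benchmark along the lines of the sketch below is the kind of test that prints RESULT lines like the ones quoted in the commit message once the Linux perf measurer is selected, e.g. by passing -perf to the test binary on Linux. The class and slot names mirror the output above; the benchmarked string value is purely illustrative.

    #include <QtTest/QtTest>

    class tst_MyClass : public QObject
    {
        Q_OBJECT
    private slots:
        void QString_toInt()
        {
            const QString s = QStringLiteral("12345");
            int n = 0;
            QBENCHMARK {
                n = s.toInt();   // measured body, repeated for the reported iteration count
            }
            Q_UNUSED(n);
        }
    };

    QTEST_MAIN(tst_MyClass)
    #include "tst_myclass.moc"

With this change, every Measurement returned by the measurer's stop() becomes its own RESULT line, so a single run reports CPU cycles, instructions, branch instructions and nsecs instead of one metric.
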
@@ -96,10 +96,13 @@ int QBenchmarkTestMethodData::adjustIterationCount(int suggestion)
     return iterationCount;
 }
 
-void QBenchmarkTestMethodData::setResult(QBenchmarkMeasurerBase::Measurement m,
+void QBenchmarkTestMethodData::setResults(const QList<QBenchmarkMeasurerBase::Measurement> &list,
                                          bool setByMacro)
 {
     bool accepted = false;
+    QBenchmarkMeasurerBase::Measurement firstMeasurement = {};
+    if (!list.isEmpty())
+        firstMeasurement = list.constFirst();
 
     // Always accept the result if the iteration count has been
     // specified on the command line with -iterations.

@@ -114,9 +117,9 @@ void QBenchmarkTestMethodData::setResult(QBenchmarkMeasurerBase::Measurement m,
     // Test the result directly without calling the measurer if the minimum time
     // has been specified on the command line with -minimumvalue.
     else if (QBenchmarkGlobalData::current->walltimeMinimum != -1)
-        accepted = (m.value > QBenchmarkGlobalData::current->walltimeMinimum);
+        accepted = (firstMeasurement.value > QBenchmarkGlobalData::current->walltimeMinimum);
     else
-        accepted = QBenchmarkGlobalData::current->measurer->isMeasurementAccepted(m);
+        accepted = QBenchmarkGlobalData::current->measurer->isMeasurementAccepted(firstMeasurement);
 
     // Accept the result or double the number of iterations.
     if (accepted)

@@ -124,8 +127,10 @@ void QBenchmarkTestMethodData::setResult(QBenchmarkMeasurerBase::Measurement m,
     else
         iterationCount *= 2;
 
-    this->result = QBenchmarkResult(QBenchmarkGlobalData::current->context, m,
-                                    iterationCount, setByMacro);
+    valid = true;
+    results.reserve(list.size());
+    for (auto m : list)
+        results.emplaceBack(QBenchmarkGlobalData::current->context, m, iterationCount, setByMacro);
 }
 
 /*!

@@ -157,8 +162,7 @@ QTest::QBenchmarkIterationController::QBenchmarkIterationController()
 */
 QTest::QBenchmarkIterationController::~QBenchmarkIterationController()
 {
-    QBenchmarkMeasurerBase::Measurement measurement = QTest::endBenchmarkMeasurement();
-    QBenchmarkTestMethodData::current->setResult(measurement);
+    QBenchmarkTestMethodData::current->setResults(QTest::endBenchmarkMeasurement());
 }
 
 /*! \internal

@@ -209,7 +213,7 @@ void QTest::beginBenchmarkMeasurement()
 
 /*! \internal
 */
-QBenchmarkMeasurerBase::Measurement QTest::endBenchmarkMeasurement()
+QList<QBenchmarkMeasurerBase::Measurement> QTest::endBenchmarkMeasurement()
 {
     // the clock is ticking before the line below, don't add code here.
     return QBenchmarkGlobalData::current->measurer->stop();

@@ -64,7 +64,6 @@ public:
     QBenchmarkMeasurerBase::Measurement measurement = { -1, QTest::FramesPerSecond };
     int iterations = -1;
     bool setByMacro = true;
-    bool valid = false;
 
     QBenchmarkResult() = default;
 

@@ -75,7 +74,6 @@ public:
         , measurement(m)
        , iterations(iterations)
         , setByMacro(setByMacro)
-        , valid(true)
     { }
 
     bool operator<(const QBenchmarkResult &other) const

@@ -134,12 +132,15 @@ public:
     void beginDataRun();
     void endDataRun();
 
-    bool isBenchmark() const { return result.valid; }
+    bool isBenchmark() const { return valid; }
     bool resultsAccepted() const { return resultAccepted; }
     int adjustIterationCount(int suggestion);
-    void setResult(QBenchmarkMeasurerBase::Measurement m, bool setByMacro = true);
+    void setResults(const QList<QBenchmarkMeasurerBase::Measurement> &m, bool setByMacro = true);
+    void setResult(QBenchmarkMeasurerBase::Measurement m, bool setByMacro = true)
+    { setResults({ m }, setByMacro); }
 
-    QBenchmarkResult result;
+    QList<QBenchmarkResult> results;
+    bool valid = false;
     bool resultAccepted = false;
     bool runOnce = false;
     int iterationCount = -1;

@@ -153,7 +154,7 @@ namespace QTest
     void setIterationCount(int count);
 
     void beginBenchmarkMeasurement();
-    QBenchmarkMeasurerBase::Measurement endBenchmarkMeasurement();
+    QList<QBenchmarkMeasurerBase::Measurement> endBenchmarkMeasurement();
 }
 
 QT_END_NAMESPACE

@@ -18,10 +18,10 @@ void QBenchmarkEvent::start()
     QAbstractEventDispatcher::instance()->installNativeEventFilter(this);
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkEvent::stop()
+QList<QBenchmarkMeasurerBase::Measurement> QBenchmarkEvent::stop()
 {
     QAbstractEventDispatcher::instance()->removeNativeEventFilter(this);
-    return { qreal(eventCounter), QTest::Events };
+    return { { qreal(eventCounter), QTest::Events } };
 }
 
 // It's very tempting to simply reject a measurement if 0 events

@@ -28,7 +28,7 @@ public:
     QBenchmarkEvent();
     ~QBenchmarkEvent();
     void start() override;
-    Measurement stop() override;
+    QList<Measurement> stop() override;
     bool isMeasurementAccepted(Measurement measurement) override;
     int adjustIterationCount(int suggestion) override;
     int adjustMedianCount(int suggestion) override;

@@ -16,9 +16,9 @@ void QBenchmarkTimeMeasurer::start()
     time.start();
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkTimeMeasurer::stop()
+QList<QBenchmarkMeasurerBase::Measurement> QBenchmarkTimeMeasurer::stop()
 {
-    return { qreal(time.elapsed()), QTest::WalltimeMilliseconds };
+    return { { qreal(time.elapsed()), QTest::WalltimeMilliseconds } };
 }
 
 bool QBenchmarkTimeMeasurer::isMeasurementAccepted(Measurement measurement)

@@ -48,10 +48,10 @@ void QBenchmarkTickMeasurer::start()
     startTicks = getticks();
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkTickMeasurer::stop()
+QList<QBenchmarkMeasurerBase::Measurement> QBenchmarkTickMeasurer::stop()
 {
     CycleCounterTicks now = getticks();
-    return { elapsed(now, startTicks), QTest::CPUTicks };
+    return { { elapsed(now, startTicks), QTest::CPUTicks } };
 }
 
 bool QBenchmarkTickMeasurer::isMeasurementAccepted(QBenchmarkMeasurerBase::Measurement)

@@ -16,6 +16,7 @@
 //
 
 #include <QtTest/qbenchmark.h>
+#include <QtCore/qlist.h>
 #include <QtCore/private/qglobal_p.h>
 
 QT_BEGIN_NAMESPACE

@@ -31,7 +32,7 @@ public:
     virtual ~QBenchmarkMeasurerBase() = default;
     virtual void init() {}
     virtual void start() = 0;
-    virtual Measurement stop() = 0;
+    virtual QList<Measurement> stop() = 0;
     virtual bool isMeasurementAccepted(Measurement m) = 0;
     virtual int adjustIterationCount(int suggestion) = 0;
     virtual int adjustMedianCount(int suggestion) = 0;
@@ -47,7 +47,13 @@
 
 QT_BEGIN_NAMESPACE
 
+struct PerfEvent
+{
+    quint32 type;
+    quint64 config;
+};
 static perf_event_attr attr;
+Q_GLOBAL_STATIC(QList<PerfEvent>, eventTypes);
 
 static void initPerf()
 {

@@ -62,14 +68,20 @@ static void initPerf()
         attr.inherit_stat = true; // aggregate all the info from child processes
         attr.task = true; // trace fork/exits
 
-        // set a default performance counter: CPU cycles
-        attr.type = PERF_TYPE_HARDWARE;
-        attr.config = PERF_COUNT_HW_CPU_CYCLES; // default
-
         done = true;
     }
 }
 
+static QList<PerfEvent> defaultCounters()
+{
+    return {
+        { .type = PERF_TYPE_SOFTWARE, .config = PERF_COUNT_SW_TASK_CLOCK },
+        { .type = PERF_TYPE_HARDWARE, .config = PERF_COUNT_HW_CPU_CYCLES },
+        { .type = PERF_TYPE_HARDWARE, .config = PERF_COUNT_HW_INSTRUCTIONS },
+        { .type = PERF_TYPE_HARDWARE, .config = PERF_COUNT_HW_BRANCH_INSTRUCTIONS },
+    };
+}
+
 // This class does not exist in the API so it's qdoc comment marker was removed.
 
 /*

@@ -383,11 +395,11 @@ static const Events eventlist[] = {
 };
 /* -- END GENERATED CODE -- */
 
-QTest::QBenchmarkMetric QBenchmarkPerfEventsMeasurer::metricForEvent(quint32 type, quint64 event_id)
+static QTest::QBenchmarkMetric metricForEvent(PerfEvent counter)
 {
     const Events *ptr = eventlist;
     for ( ; ptr->type != PERF_TYPE_MAX; ++ptr) {
-        if (ptr->type == type && ptr->event_id == event_id)
+        if (ptr->type == counter.type && ptr->event_id == counter.config)
             return ptr->metric;
     }
     return QTest::Events;

@@ -396,6 +408,7 @@ QTest::QBenchmarkMetric QBenchmarkPerfEventsMeasurer::metricForEvent(quint32 typ
 void QBenchmarkPerfEventsMeasurer::setCounter(const char *name)
 {
     initPerf();
+    eventTypes->clear();
     const char *colon = strchr(name, ':');
     int n = colon ? colon - name : strlen(name);
     const Events *ptr = eventlist;

@@ -409,8 +422,7 @@ void QBenchmarkPerfEventsMeasurer::setCounter(const char *name)
         }
     }
 
-    attr.type = ptr->type;
-    attr.config = ptr->event_id;
+    *eventTypes = { { ptr->type, ptr->event_id } };
 
     // We used to support attributes, but our code was the opposite of what
     // perf(1) does, plus QBenchlib isn't exactly expected to be used to

@@ -441,7 +453,8 @@ QBenchmarkPerfEventsMeasurer::QBenchmarkPerfEventsMeasurer() = default;
 
 QBenchmarkPerfEventsMeasurer::~QBenchmarkPerfEventsMeasurer()
 {
-    qt_safe_close(fd);
+    for (int fd : std::as_const(fds))
+        qt_safe_close(fd);
 }
 
 void QBenchmarkPerfEventsMeasurer::init()

@@ -451,34 +464,54 @@ void QBenchmarkPerfEventsMeasurer::init()
 void QBenchmarkPerfEventsMeasurer::start()
 {
     initPerf();
-    if (fd == -1) {
+    QList<PerfEvent> &counters = *eventTypes;
+    if (counters.isEmpty())
+        counters = defaultCounters();
+    if (fds.isEmpty()) {
         pid_t pid = 0; // attach to the current process only
         int cpu = -1; // on any CPU
         int group_fd = -1;
         int flags = PERF_FLAG_FD_CLOEXEC;
-        fd = perf_event_open(&attr, pid, cpu, group_fd, flags);
-        if (fd == -1) {
-            // probably a paranoid kernel (/proc/sys/kernel/perf_event_paranoid)
-            attr.exclude_kernel = true;
-            attr.exclude_hv = true;
-            fd = perf_event_open(&attr, pid, cpu, group_fd, flags);
-        }
-        if (fd == -1) {
-            perror("QBenchmarkPerfEventsMeasurer::start: perf_event_open");
-            exit(1);
+        fds.reserve(counters.size());
+        for (PerfEvent counter : std::as_const(counters)) {
+            attr.type = counter.type;
+            attr.config = counter.config;
+            int fd = perf_event_open(&attr, pid, cpu, group_fd, flags);
+            if (fd == -1) {
+                // probably a paranoid kernel (/proc/sys/kernel/perf_event_paranoid)
+                attr.exclude_kernel = true;
+                attr.exclude_hv = true;
+                fd = perf_event_open(&attr, pid, cpu, group_fd, flags);
+            }
+            if (fd == -1) {
+                perror("QBenchmarkPerfEventsMeasurer::start: perf_event_open");
+                exit(1);
+            }
+
+            fds.append(fd);
         }
     }
 
-    // enable the counter
-    ::ioctl(fd, PERF_EVENT_IOC_RESET);
-    ::ioctl(fd, PERF_EVENT_IOC_ENABLE);
+    // enable the counters
+    for (int fd : std::as_const(fds))
+        ::ioctl(fd, PERF_EVENT_IOC_RESET);
+    for (int fd : std::as_const(fds))
+        ::ioctl(fd, PERF_EVENT_IOC_ENABLE);
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkPerfEventsMeasurer::stop()
+QList<QBenchmarkMeasurerBase::Measurement> QBenchmarkPerfEventsMeasurer::stop()
 {
-    // disable the counter
-    ::ioctl(fd, PERF_EVENT_IOC_DISABLE);
-    return readValue();
+    // disable the counters
+    for (int fd : std::as_const(fds))
+        ::ioctl(fd, PERF_EVENT_IOC_DISABLE);
+
+    const QList<PerfEvent> &counters = *eventTypes;
+    QList<Measurement> result(counters.size(), {});
+    for (qsizetype i = 0; i < counters.size(); ++i) {
+        result[i] = readValue(i);
+    }
+    return result;
 }
 
 bool QBenchmarkPerfEventsMeasurer::isMeasurementAccepted(Measurement)
@@ -531,10 +564,10 @@ static quint64 rawReadValue(int fd)
     return results.value * (double(results.time_running) / double(results.time_enabled));
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkPerfEventsMeasurer::readValue()
+QBenchmarkMeasurerBase::Measurement QBenchmarkPerfEventsMeasurer::readValue(qsizetype idx)
 {
-    quint64 raw = rawReadValue(fd);
-    return { qreal(qint64(raw)), metricForEvent(attr.type, attr.config) };
+    quint64 raw = rawReadValue(fds.at(idx));
+    return { qreal(qint64(raw)), metricForEvent(eventTypes->at(idx)) };
 }
 
 QT_END_NAMESPACE

@@ -26,20 +26,19 @@ public:
     ~QBenchmarkPerfEventsMeasurer();
     void init() override;
     void start() override;
-    Measurement stop() override;
+    QList<Measurement> stop() override;
     bool isMeasurementAccepted(Measurement measurement) override;
     int adjustIterationCount(int suggestion) override;
     int adjustMedianCount(int suggestion) override;
     bool needsWarmupIteration() override { return true; }
 
     static bool isAvailable();
-    static QTest::QBenchmarkMetric metricForEvent(quint32 type, quint64 event_id);
     static void setCounter(const char *name);
     static void listCounters();
 private:
-    int fd = -1;
+    QList<int> fds;
 
-    Measurement readValue();
+    Measurement readValue(qsizetype idx = 0);
 };
 
 QT_END_NAMESPACE

@@ -25,7 +25,7 @@ class QBenchmarkTimeMeasurer : public QBenchmarkMeasurerBase
 {
 public:
     void start() override;
-    Measurement stop() override;
+    QList<Measurement> stop() override;
     bool isMeasurementAccepted(Measurement measurement) override;
     int adjustIterationCount(int sugestion) override;
     int adjustMedianCount(int suggestion) override;

@@ -40,7 +40,7 @@ class QBenchmarkTickMeasurer : public QBenchmarkMeasurerBase
 {
 public:
     void start() override;
-    Measurement stop() override;
+    QList<Measurement> stop() override;
     bool isMeasurementAccepted(Measurement measurement) override;
     int adjustIterationCount(int) override;
     int adjustMedianCount(int suggestion) override;

@@ -170,11 +170,11 @@ void QBenchmarkCallgrindMeasurer::start()
     CALLGRIND_ZERO_STATS;
 }
 
-QBenchmarkMeasurerBase::Measurement QBenchmarkCallgrindMeasurer::stop()
+QList<QBenchmarkMeasurerBase::Measurement> QBenchmarkCallgrindMeasurer::stop()
 {
     CALLGRIND_DUMP_STATS;
     const qint64 result = QBenchmarkValgrindUtils::extractLastResult();
-    return { qreal(result), QTest::InstructionReads };
+    return { { qreal(result), QTest::InstructionReads } };
 }
 
 bool QBenchmarkCallgrindMeasurer::isMeasurementAccepted(Measurement measurement)

@@ -41,7 +41,7 @@ class QBenchmarkCallgrindMeasurer : public QBenchmarkMeasurerBase
 {
 public:
     void start() override;
-    Measurement stop() override;
+    QList<Measurement> stop() override;
     bool isMeasurementAccepted(Measurement measurement) override;
     int adjustIterationCount(int) override;
     int adjustMedianCount(int) override;

@@ -1067,17 +1067,20 @@ Q_TESTLIB_EXPORT void qtest_qParseArgs(int argc, char *argv[], bool qml) {
     qtest_qParseArgs(argc, const_cast<const char *const *>(argv), qml);
 }
 
-QBenchmarkResult qMedian(const QList<QBenchmarkResult> &container)
+static QList<QBenchmarkResult> qMedian(const QList<QList<QBenchmarkResult>> &container)
 {
     const int count = container.size();
     if (count == 0)
-        return QBenchmarkResult();
+        return {};
 
     if (count == 1)
         return container.front();
 
-    QList<QBenchmarkResult> containerCopy = container;
-    std::sort(containerCopy.begin(), containerCopy.end());
+    QList<QList<QBenchmarkResult>> containerCopy = container;
+    std::sort(containerCopy.begin(), containerCopy.end(),
+              [](const QList<QBenchmarkResult> &a, const QList<QBenchmarkResult> &b) {
+                  return a.first() < b.first();
+              });
 
     const int middle = count / 2;
 
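
To make the qMedian() change concrete: each entry of the outer list is one complete run (one QBenchmarkResult per metric), runs are ordered by their first result only, and the middle run is then reported wholesale. A small self-contained illustration with invented numbers, using plain doubles to stand in for QBenchmarkResult:

    #include <QList>
    #include <algorithm>
    #include <cstdio>

    int main()
    {
        // Three runs, each carrying two metrics (values are made up).
        QList<QList<double>> runs = { { 151.2, 641.0 },
                                      { 149.5, 620.0 },
                                      { 150.1, 625.0 } };

        // Order runs by their first metric only, as the new qMedian() does.
        std::sort(runs.begin(), runs.end(),
                  [](const QList<double> &a, const QList<double> &b) {
                      return a.first() < b.first();
                  });

        const QList<double> &median = runs.at(runs.size() / 2);  // the 150.1 run
        for (double value : median)
            std::printf("reported: %f\n", value);                // every metric of that run
        return 0;
    }
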
@@ -1104,7 +1107,7 @@ void TestMethods::invokeTestOnData(int index) const
    bool isBenchmark = false;
    int i = (QBenchmarkGlobalData::current->measurer->needsWarmupIteration()) ? -1 : 0;
 
-    QList<QBenchmarkResult> results;
+    QList<QList<QBenchmarkResult>> resultsList;
    bool minimumTotalReached = false;
    do {
        QBenchmarkTestMethodData::current->beginDataRun();

@@ -1121,8 +1124,9 @@ void TestMethods::invokeTestOnData(int index) const
            const bool initQuit =
                QTestResult::skipCurrentTest() || QTestResult::currentTestFailed();
            if (!initQuit) {
-                QBenchmarkTestMethodData::current->result = QBenchmarkResult();
+                QBenchmarkTestMethodData::current->results.clear();
                QBenchmarkTestMethodData::current->resultAccepted = false;
+                QBenchmarkTestMethodData::current->valid = false;
 
                QBenchmarkGlobalData::current->context.tag = QLatin1StringView(
                    QTestResult::currentDataTag() ? QTestResult::currentDataTag() : "");

@@ -1164,29 +1168,29 @@ void TestMethods::invokeTestOnData(int index) const
        QBenchmarkTestMethodData::current->endDataRun();
        if (!QTestResult::skipCurrentTest() && !QTestResult::currentTestFailed()) {
            if (i > -1)  // iteration -1 is the warmup iteration.
-                results.append(QBenchmarkTestMethodData::current->result);
+                resultsList.append(QBenchmarkTestMethodData::current->results);
 
-            if (isBenchmark && QBenchmarkGlobalData::current->verboseOutput) {
-                if (i == -1) {
-                    QTestLog::info(qPrintable(
-                        QString::fromLatin1("warmup stage result : %1")
-                        .arg(QBenchmarkTestMethodData::current->result.measurement.value)), nullptr, 0);
-                } else {
-                    QTestLog::info(qPrintable(
-                        QString::fromLatin1("accumulation stage result: %1")
-                        .arg(QBenchmarkTestMethodData::current->result.measurement.value)), nullptr, 0);
-                }
+            if (isBenchmark && QBenchmarkGlobalData::current->verboseOutput &&
+                    !QBenchmarkTestMethodData::current->results.isEmpty()) {
+                // we only print the first result
+                const QBenchmarkResult &first = QBenchmarkTestMethodData::current->results.constFirst();
+                QString pattern = i < 0 ? "warmup stage result : %1"_L1
+                                        : "accumulation stage result: %1"_L1;
+                QTestLog::info(qPrintable(pattern.arg(first.measurement.value)), nullptr, 0);
            }
        }
 
-        // Verify if the minimum total measurement is reached, if it was specified:
+        // Verify if the minimum total measurement (for the first measurement)
+        // was reached, if it was specified:
        if (QBenchmarkGlobalData::current->minimumTotal == -1) {
            minimumTotalReached = true;
        } else {
-            auto addResult = [](qreal current, const QBenchmarkResult& r) {
-                return current + r.measurement.value;
+            auto addResult = [](qreal current, const QList<QBenchmarkResult> &r) {
+                if (!r.isEmpty())
+                    current += r.first().measurement.value;
+                return current;
            };
-            const qreal total = std::accumulate(results.begin(), results.end(), 0.0, addResult);
+            const qreal total = std::accumulate(resultsList.begin(), resultsList.end(), 0.0, addResult);
            minimumTotalReached = (total >= QBenchmarkGlobalData::current->minimumTotal);
        }
    } while (isBenchmark

@@ -1198,8 +1202,12 @@ void TestMethods::invokeTestOnData(int index) const
        bool testPassed = !QTestResult::skipCurrentTest() && !QTestResult::currentTestFailed();
        QTestResult::finishedCurrentTestDataCleanup();
        // Only report benchmark figures if the test passed
-        if (testPassed && QBenchmarkTestMethodData::current->resultsAccepted())
-            QTestLog::addBenchmarkResult(qMedian(results));
+        if (testPassed && QBenchmarkTestMethodData::current->resultsAccepted()) {
+            const QList<QBenchmarkResult> median = qMedian(resultsList);
+            for (auto m : median) {
+                QTestLog::addBenchmarkResult(m);
+            }
+        }
    }
 }
 