summary | refs | log | tree | commit | diff | stats
path: root/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py
diff options
context:
space:
mode:
Diffstat (limited to 'Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py')
-rwxr-xr-x  Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py  193
1 files changed, 186 insertions, 7 deletions
diff --git a/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py b/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py
index 21efd2c3c..078f08a46 100755
--- a/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py
+++ b/Tools/Scripts/webkitpy/performance_tests/perftest_unittest.py
@@ -31,12 +31,16 @@ import StringIO
import math
import unittest
+from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.layout_tests.port.driver import DriverOutput
+from webkitpy.layout_tests.port.test import TestDriver
+from webkitpy.layout_tests.port.test import TestPort
from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
from webkitpy.performance_tests.perftest import PageLoadingPerfTest
from webkitpy.performance_tests.perftest import PerfTest
from webkitpy.performance_tests.perftest import PerfTestFactory
+from webkitpy.performance_tests.perftest import ReplayPerfTest
class MainTest(unittest.TestCase):
@@ -53,7 +57,7 @@ class MainTest(unittest.TestCase):
output_capture = OutputCapture()
output_capture.capture_output()
try:
- test = PerfTest('some-test', '/path/some-dir/some-test')
+ test = PerfTest(None, 'some-test', '/path/some-dir/some-test')
self.assertEqual(test.parse_output(output),
{'some-test': {'avg': 1100.0, 'median': 1101.0, 'min': 1080.0, 'max': 1120.0, 'stdev': 11.0, 'unit': 'ms'}})
finally:
@@ -77,7 +81,7 @@ class MainTest(unittest.TestCase):
output_capture = OutputCapture()
output_capture.capture_output()
try:
- test = PerfTest('some-test', '/path/some-dir/some-test')
+ test = PerfTest(None, 'some-test', '/path/some-dir/some-test')
self.assertEqual(test.parse_output(output), None)
finally:
actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
@@ -101,7 +105,7 @@ class TestPageLoadingPerfTest(unittest.TestCase):
return DriverOutput('some output', image=None, image_hash=None, audio=None, test_time=self._values[self._index - 1])
def test_run(self):
- test = PageLoadingPerfTest('some-test', '/path/some-dir/some-test')
+ test = PageLoadingPerfTest(None, 'some-test', '/path/some-dir/some-test')
driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
output_capture = OutputCapture()
output_capture.capture_output()
@@ -118,7 +122,7 @@ class TestPageLoadingPerfTest(unittest.TestCase):
output_capture = OutputCapture()
output_capture.capture_output()
try:
- test = PageLoadingPerfTest('some-test', '/path/some-dir/some-test')
+ test = PageLoadingPerfTest(None, 'some-test', '/path/some-dir/some-test')
driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
self.assertEqual(test.run(driver, None), None)
finally:
@@ -128,17 +132,192 @@ class TestPageLoadingPerfTest(unittest.TestCase):
self.assertEqual(actual_logs, 'error: some-test\nsome error\n')
+class TestReplayPerfTest(unittest.TestCase):
+
+ class ReplayTestPort(TestPort):
+ def __init__(self, custom_run_test=None):
+
+ class ReplayTestDriver(TestDriver):
+ def run_test(self, text_input):
+ return custom_run_test(text_input) if custom_run_test else None
+
+ self._custom_driver_class = ReplayTestDriver
+ super(self.__class__, self).__init__(host=MockHost())
+
+ def _driver_class(self):
+ return self._custom_driver_class
+
+ class MockReplayServer(object):
+ def __init__(self, wait_until_ready=True):
+ self.wait_until_ready = lambda: wait_until_ready
+
+ def stop(self):
+ pass
+
+ def _add_file(self, port, dirname, filename, content=True):
+ port.host.filesystem.maybe_make_directory(dirname)
+ port.host.filesystem.files[port.host.filesystem.join(dirname, filename)] = content
+
+ def _setup_test(self, run_test=None):
+ test_port = self.ReplayTestPort(run_test)
+ self._add_file(test_port, '/path/some-dir', 'some-test.replay', 'http://some-test/')
+ test = ReplayPerfTest(test_port, 'some-test.replay', '/path/some-dir/some-test.replay')
+ test._start_replay_server = lambda archive, record: self.__class__.MockReplayServer()
+ return test, test_port
+
+ def test_run_single(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+
+ loaded_pages = []
+
+ def run_test(test_input):
+ if test_input.test_name != "about:blank":
+ self.assertEqual(test_input.test_name, 'http://some-test/')
+ loaded_pages.append(test_input)
+ self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
+ return DriverOutput('actual text', 'actual image', 'actual checksum',
+ audio=None, crash=False, timeout=False, error=False)
+
+ test, port = self._setup_test(run_test)
+ test._archive_path = '/path/some-dir/some-test.wpr'
+ test._url = 'http://some-test/'
+
+ try:
+ driver = port.create_driver(worker_number=1, no_timeout=True)
+ self.assertTrue(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100))
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+
+ self.assertEqual(len(loaded_pages), 2)
+ self.assertEqual(loaded_pages[0].test_name, 'about:blank')
+ self.assertEqual(loaded_pages[1].test_name, 'http://some-test/')
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, '')
+
+ def test_run_single_fails_without_webpagereplay(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+
+ test, port = self._setup_test()
+ test._start_replay_server = lambda archive, record: None
+ test._archive_path = '/path/some-dir.wpr'
+ test._url = 'http://some-test/'
+
+ try:
+ driver = port.create_driver(worker_number=1, no_timeout=True)
+ self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, "Web page replay didn't start.\n")
+
+ def test_prepare_fails_when_wait_until_ready_fails(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+
+ test, port = self._setup_test()
+ test._start_replay_server = lambda archive, record: self.__class__.MockReplayServer(wait_until_ready=False)
+ test._archive_path = '/path/some-dir.wpr'
+ test._url = 'http://some-test/'
+
+ try:
+ driver = port.create_driver(worker_number=1, no_timeout=True)
+ self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, "Web page replay didn't start.\n")
+
+ def test_run_single_fails_when_output_has_error(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+
+ loaded_pages = []
+
+ def run_test(test_input):
+ loaded_pages.append(test_input)
+ self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
+ return DriverOutput('actual text', 'actual image', 'actual checksum',
+ audio=None, crash=False, timeout=False, error='some error')
+
+ test, port = self._setup_test(run_test)
+ test._archive_path = '/path/some-dir.wpr'
+ test._url = 'http://some-test/'
+
+ try:
+ driver = port.create_driver(worker_number=1, no_timeout=True)
+ self.assertEqual(test.run_single(driver, '/path/some-dir/some-test.replay', time_out_ms=100), None)
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+
+ self.assertEqual(len(loaded_pages), 2)
+ self.assertEqual(loaded_pages[0].test_name, 'about:blank')
+ self.assertEqual(loaded_pages[1].test_name, 'http://some-test/')
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, 'error: some-test.replay\nsome error\n')
+
+ def test_prepare(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+
+ def run_test(test_input):
+ self._add_file(port, '/path/some-dir', 'some-test.wpr', 'wpr content')
+ return DriverOutput('actual text', 'actual image', 'actual checksum',
+ audio=None, crash=False, timeout=False, error=False)
+
+ test, port = self._setup_test(run_test)
+
+ try:
+ self.assertEqual(test.prepare(time_out_ms=100), True)
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, 'Preparing replay for some-test.replay\nPrepared replay for some-test.replay\n')
+
+ def test_prepare_calls_run_single(self):
+ output_capture = OutputCapture()
+ output_capture.capture_output()
+ called = [False]
+
+ def run_single(driver, url, time_out_ms, record):
+ self.assertTrue(record)
+ self.assertEqual(url, 'http://some-test/')
+ called[0] = True
+ return False
+
+ test, port = self._setup_test()
+ test.run_single = run_single
+
+ try:
+ self.assertEqual(test.prepare(time_out_ms=100), False)
+ finally:
+ actual_stdout, actual_stderr, actual_logs = output_capture.restore_output()
+ self.assertTrue(called[0])
+ self.assertEqual(test._archive_path, '/path/some-dir/some-test.wpr')
+ self.assertEqual(test._url, 'http://some-test/')
+ self.assertEqual(actual_stdout, '')
+ self.assertEqual(actual_stderr, '')
+ self.assertEqual(actual_logs, "Preparing replay for some-test.replay\nFailed to prepare a replay for some-test.replay\n")
+
class TestPerfTestFactory(unittest.TestCase):
def test_regular_test(self):
- test = PerfTestFactory.create_perf_test('some-dir/some-test', '/path/some-dir/some-test')
+ test = PerfTestFactory.create_perf_test(None, 'some-dir/some-test', '/path/some-dir/some-test')
self.assertEqual(test.__class__, PerfTest)
def test_inspector_test(self):
- test = PerfTestFactory.create_perf_test('inspector/some-test', '/path/inspector/some-test')
+ test = PerfTestFactory.create_perf_test(None, 'inspector/some-test', '/path/inspector/some-test')
self.assertEqual(test.__class__, ChromiumStylePerfTest)
def test_page_loading_test(self):
- test = PerfTestFactory.create_perf_test('PageLoad/some-test', '/path/PageLoad/some-test')
+ test = PerfTestFactory.create_perf_test(None, 'PageLoad/some-test', '/path/PageLoad/some-test')
self.assertEqual(test.__class__, PageLoadingPerfTest)